code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def determine_packages_to_sync(self):
"""
Update the self.packages_to_sync to contain packages that need to be
synced.
"""
# In case we don't find any changes we will stay on the currently
# synced serial.
self.target_serial = self.synced_serial
self.packa... | Update the self.packages_to_sync to contain packages that need to be
synced. |
def changed_fields(self, from_db=False):
"""
Args:
from_db (bool): Check changes against actual db data
Returns:
list: List of fields names which their values changed.
"""
if self.exist:
current_dict = self.clean_value()
# `from_db`... | Args:
from_db (bool): Check changes against actual db data
Returns:
list: List of fields names which their values changed. |
def to_internal_value(self, data):
"""
Dicts of native values <- Dicts of primitive datatypes.
"""
if html.is_html_input(data):
data = html.parse_html_dict(data)
if not isinstance(data, dict):
self.fail('not_a_dict', input_type=type(data).__name__)
... | Dicts of native values <- Dicts of primitive datatypes. |
def add_flooded_field(self, shapefile_path):
"""Create the layer from the local shp adding the flooded field.
.. versionadded:: 3.3
Use this method to add a calculated field to a shapefile. The shapefile
should have a field called 'count' containing the number of flood
reports ... | Create the layer from the local shp adding the flooded field.
.. versionadded:: 3.3
Use this method to add a calculated field to a shapefile. The shapefile
should have a field called 'count' containing the number of flood
reports for the field. The field values will be set to 0 if the ... |
def get_log_form(self, *args, **kwargs):
"""Pass through to provider LogAdminSession.get_log_form_for_update"""
# Implemented from kitosid template for -
# osid.resource.BinAdminSession.get_bin_form_for_update_template
# This method might be a bit sketchy. Time will tell.
if isin... | Pass through to provider LogAdminSession.get_log_form_for_update |
def setIcon(self, icon):
"""
Sets the icon for this hotspot. If this method is called with a valid
icon, then the style will automatically switch to Icon, otherwise,
the style will be set to Invisible.
:param icon | <QIcon> || <str> || None
"""
... | Sets the icon for this hotspot. If this method is called with a valid
icon, then the style will automatically switch to Icon, otherwise,
the style will be set to Invisible.
:param icon | <QIcon> || <str> || None |
def overlap(self, value):
"""
Set the value of the ``<c:overlap>`` child element to *int_value*,
or remove the overlap element if *int_value* is 0.
"""
if value == 0:
self._element._remove_overlap()
return
self._element.get_or_add_overlap().val = v... | Set the value of the ``<c:overlap>`` child element to *int_value*,
or remove the overlap element if *int_value* is 0. |
def header(self, k, v, replace=True):
""" Sets header value. Replaces existing value if `replace` is True.
Otherwise create a list of existing values and `v`
:param k: Header key
:param v: Header value
:param replace: flag for setting mode.
:type k: str
:type v: ... | Sets header value. Replaces existing value if `replace` is True.
Otherwise create a list of existing values and `v`
:param k: Header key
:param v: Header value
:param replace: flag for setting mode.
:type k: str
:type v: str
:type replace: bool |
def create_line_plot(df):
""" create a mg line plot
Args:
df (pandas.DataFrame): data to plot
"""
fig = Figure("/mg/line_plot/", "mg_line_plot")
fig.graphics.transition_on_update(True)
fig.graphics.animate_on_load()
fig.layout.set_size(width=450, height=200)
fig.layout.s... | create a mg line plot
Args:
df (pandas.DataFrame): data to plot |
def disconnect(self, output_port, input_port):
"""
Remove a connection between (two ports of) :class:`.Effect` instances.
For this, is necessary informs the output port origin and the input port destination::
>>> pedalboard.append(driver)
>>> pedalboard.append(reverb)
... | Remove a connection between (two ports of) :class:`.Effect` instances.
For this, is necessary informs the output port origin and the input port destination::
>>> pedalboard.append(driver)
>>> pedalboard.append(reverb)
>>> driver_output = driver.outputs[0]
>>> rev... |
def install(root=None, expose=None):
"""Installs the default :class:`VendorImporter` for PEX vendored code.
Any distributions listed in ``expose`` will also be exposed for direct import; ie:
``install(expose=['setuptools'])`` would make both ``setuptools`` and ``wheel`` available for
import via ``from pex.thi... | Installs the default :class:`VendorImporter` for PEX vendored code.
Any distributions listed in ``expose`` will also be exposed for direct import; ie:
``install(expose=['setuptools'])`` would make both ``setuptools`` and ``wheel`` available for
import via ``from pex.third_party import setuptools, wheel``, but o... |
def remove_counter(self, key, path, consistency_level):
"""
Remove a counter at the specified location.
Note that counters have limited support for deletes: if you remove a counter, you must wait to issue any following update
until the delete has reached all the nodes and all of them have been fully com... | Remove a counter at the specified location.
Note that counters have limited support for deletes: if you remove a counter, you must wait to issue any following update
until the delete has reached all the nodes and all of them have been fully compacted.
Parameters:
- key
- path
- consistency_l... |
def create_script(create=None): # noqa: E501
"""Create a new script
Create a new script # noqa: E501
:param Scripts: The data needed to create this script
:type Scripts: dict | bytes
:rtype: Response
"""
if connexion.request.is_json:
create = Create.from_dict(connexion.request.ge... | Create a new script
Create a new script # noqa: E501
:param Scripts: The data needed to create this script
:type Scripts: dict | bytes
:rtype: Response |
def create(cls, name, nat=False, mobile_vpn_toplogy_mode=None,
vpn_profile=None):
"""
Create a new policy based VPN
:param name: name of vpn policy
:param bool nat: whether to apply NAT to the VPN (default False)
:param mobile_vpn_toplogy_mode: whether to allow re... | Create a new policy based VPN
:param name: name of vpn policy
:param bool nat: whether to apply NAT to the VPN (default False)
:param mobile_vpn_toplogy_mode: whether to allow remote vpn
:param VPNProfile vpn_profile: reference to VPN profile, or uses default
:rtype: PolicyVPN |
def gsea_significance(enrichment_scores, enrichment_nulls):
"""Compute nominal pvals, normalized ES, and FDR q value.
For a given NES(S) = NES* >= 0. The FDR is the ratio of the percentage of all (S,pi) with
NES(S,pi) >= 0, whose NES(S,pi) >= NES*, divided by the percentage of
observed S wi... | Compute nominal pvals, normalized ES, and FDR q value.
For a given NES(S) = NES* >= 0. The FDR is the ratio of the percentage of all (S,pi) with
NES(S,pi) >= 0, whose NES(S,pi) >= NES*, divided by the percentage of
observed S wih NES(S) >= 0, whose NES(S) >= NES*, and similarly if NES(S) = NES*... |
def _register_key(fingerprint, gpg):
"""Registers key in config"""
for private_key in gpg.list_keys(True):
try:
if str(fingerprint) == private_key['fingerprint']:
config["gpg_key_fingerprint"] = \
repr(private_key['fingerprint'])
except KeyError:
... | Registers key in config |
def file_md5sum(filename):
"""
:param filename: The filename of the file to process
:returns: The MD5 hash of the file
"""
hash_md5 = hashlib.md5()
with open(filename, 'rb') as f:
for chunk in iter(lambda: f.read(1024 * 4), b''):
hash_md5.update(chunk)
return hash_md5.hex... | :param filename: The filename of the file to process
:returns: The MD5 hash of the file |
def scrollTextIntoView(self, text):
'''
Performs a forward scroll action on the scrollable layout element until the text you provided is visible,
or until swipe attempts have been exhausted. See setMaxSearchSwipes(int)
'''
if self.vc is None:
raise ValueError('vc mus... | Performs a forward scroll action on the scrollable layout element until the text you provided is visible,
or until swipe attempts have been exhausted. See setMaxSearchSwipes(int) |
def check_version(url=VERSION_URL):
"""Returns the version string for the latest SDK."""
for line in get(url):
if 'release:' in line:
return line.split(':')[-1].strip(' \'"\r\n') | Returns the version string for the latest SDK. |
def processFlat(self):
"""Main process.for flat segmentation.
Returns
-------
est_idxs : np.array(N)
Estimated times for the segment boundaries in frame indeces.
est_labels : np.array(N-1)
Estimated labels for the segments.
"""
self.config[... | Main process.for flat segmentation.
Returns
-------
est_idxs : np.array(N)
Estimated times for the segment boundaries in frame indeces.
est_labels : np.array(N-1)
Estimated labels for the segments. |
def send_calibrate_accelerometer(self, simple=False):
"""Request accelerometer calibration.
:param simple: if True, perform simple accelerometer calibration
"""
calibration_command = self.message_factory.command_long_encode(
self._handler.target_system, 0, # target_system,... | Request accelerometer calibration.
:param simple: if True, perform simple accelerometer calibration |
def transform(self, X, y=None):
"""Transform X to a cluster-distance space.
In the new space, each dimension is the cosine distance to the cluster
centers. Note that even if X is sparse, the array returned by
`transform` will typically be dense.
Parameters
----------
... | Transform X to a cluster-distance space.
In the new space, each dimension is the cosine distance to the cluster
centers. Note that even if X is sparse, the array returned by
`transform` will typically be dense.
Parameters
----------
X : {array-like, sparse matrix}, shap... |
def insert_break(lines, break_pos=9):
"""
Insert a <!--more--> tag for larger release notes.
Parameters
----------
lines : list of str
The content of the release note.
break_pos : int
Line number before which a break should approximately be inserted.
Returns
-------
... | Insert a <!--more--> tag for larger release notes.
Parameters
----------
lines : list of str
The content of the release note.
break_pos : int
Line number before which a break should approximately be inserted.
Returns
-------
list of str
The text with the inserted ta... |
def ser2ber(q,n,d,t,ps):
"""
Converts symbol error rate to bit error rate. Taken from Ziemer and
Tranter page 650. Necessary when comparing different types of block codes.
parameters
----------
q: size of the code alphabet for given modulation type (BPSK=2)
n: number of channel bits
... | Converts symbol error rate to bit error rate. Taken from Ziemer and
Tranter page 650. Necessary when comparing different types of block codes.
parameters
----------
q: size of the code alphabet for given modulation type (BPSK=2)
n: number of channel bits
d: distance (2e+1) where e is the ... |
def get_step_index(self, step=None):
"""
Returns the index for the given `step` name. If no step is given,
the current step will be used to get the index.
"""
if step is None:
step = self.steps.current
return self.get_form_list().keyOrder.index(step) | Returns the index for the given `step` name. If no step is given,
the current step will be used to get the index. |
def _SetupPaths():
"""Sets up the sys.path with special directories for endpointscfg.py."""
sdk_path = _FindSdkPath()
if sdk_path:
sys.path.append(sdk_path)
try:
import dev_appserver # pylint: disable=g-import-not-at-top
if hasattr(dev_appserver, 'fix_sys_path'):
dev_appserver.fix_sys... | Sets up the sys.path with special directories for endpointscfg.py. |
def update_mapping_meta(self, doc_type, values, indices=None):
"""
Update mapping meta
:param doc_type: a doc type or a list of doctypes
:param values: the dict of meta
:param indices: a list of indices
:return:
"""
indices = self._validate_indices(indices... | Update mapping meta
:param doc_type: a doc type or a list of doctypes
:param values: the dict of meta
:param indices: a list of indices
:return: |
def write_chunks(out, chunks):
"""Create a PNG file by writing out the chunks."""
out.write(png_signature)
for chunk in chunks:
write_chunk(out, *chunk) | Create a PNG file by writing out the chunks. |
def install_napp(cls, mgr):
"""Install a NApp.
Raises:
KytosException: If a NApp hasn't been found.
"""
try:
LOG.info(' Searching local NApp...')
mgr.install_local()
LOG.info(' Found and installed.')
except FileNotFoundError... | Install a NApp.
Raises:
KytosException: If a NApp hasn't been found. |
def change_key(self, key):
""" re-encrypt stored services and orgs with the new key """
services = {}
for service_name in self.list_services():
services[service_name] = self.get_service(service_name)
orgs = {}
for org_name in self.list_orgs():
orgs[org_n... | re-encrypt stored services and orgs with the new key |
def get_protein_seq_for_transcript(self, transcript_id):
""" obtain the sequence for a transcript from ensembl
"""
headers = {"content-type": "text/plain"}
self.attempt = 0
ext = "/sequence/id/{}?type=protein".format(transcript_id)
return self.e... | obtain the sequence for a transcript from ensembl |
def wsgi_server_target(self, wsgi_environment, response_start):
"""
Searches route in self.routes and passes found target wsgi_environment,
response_start and route_result. explanation for route_result is in
Route class.
result from ranning this method is simply passed from targ... | Searches route in self.routes and passes found target wsgi_environment,
response_start and route_result. explanation for route_result is in
Route class.
result from ranning this method is simply passed from target to carafe
calling fuctionality. see Carafe class for explanations to this... |
def inherit_set(base, namespace, attr_name,
inherit=lambda i: True):
"""
Perform inheritance of sets. Returns a list of items that
were inherited, for post-processing.
:param base: The base class being considered; see
``iter_bases()``.
:... | Perform inheritance of sets. Returns a list of items that
were inherited, for post-processing.
:param base: The base class being considered; see
``iter_bases()``.
:param namespace: The dictionary of the new class being built.
:param attr_name: The name of the attri... |
def send_request(self, worker_class_or_function, args, on_receive=None):
"""
Requests some work to be done by the backend. You can get notified of
the work results by passing a callback (on_receive).
:param worker_class_or_function: Worker class or function
:param args: worker a... | Requests some work to be done by the backend. You can get notified of
the work results by passing a callback (on_receive).
:param worker_class_or_function: Worker class or function
:param args: worker args, any Json serializable objects
:param on_receive: an optional callback executed w... |
def find_words(text, suspect_words, excluded_words=[]):
"""Check if a text has some of the suspect words (or words that starts with
one of the suspect words). You can set some words to be excluded of the
search, so you can remove false positives like 'important' be detected when
you search by 'import'. ... | Check if a text has some of the suspect words (or words that starts with
one of the suspect words). You can set some words to be excluded of the
search, so you can remove false positives like 'important' be detected when
you search by 'import'. It will return True if the number of suspect words
found is... |
def delete(filething):
""" delete(filething)
Arguments:
filething (filething)
Raises:
mutagen.MutagenError
Remove tags from a file.
"""
t = OggFLAC(filething)
filething.fileobj.seek(0)
t.delete(filething) | delete(filething)
Arguments:
filething (filething)
Raises:
mutagen.MutagenError
Remove tags from a file. |
async def variant(self, elem=None, elem_type=None, params=None, obj=None):
"""
Loads/dumps variant type
:param elem:
:param elem_type:
:param params:
:param obj:
:return:
"""
elem_type = elem_type if elem_type else elem.__class__
if hasatt... | Loads/dumps variant type
:param elem:
:param elem_type:
:param params:
:param obj:
:return: |
def count_seeds(usort):
"""
uses bash commands to quickly count N seeds from utemp file
"""
with open(usort, 'r') as insort:
cmd1 = ["cut", "-f", "2"]
cmd2 = ["uniq"]
cmd3 = ["wc"]
proc1 = sps.Popen(cmd1, stdin=insort, stdout=sps.PIPE, close_fds=True)
proc2 = sps.... | uses bash commands to quickly count N seeds from utemp file |
def write_config(self):
"""Write the current configuration to the config file."""
config_file = os.path.join(self.config_dir, 'pueue.ini')
with open(config_file, 'w') as file_descriptor:
self.config.write(file_descriptor) | Write the current configuration to the config file. |
def get_include_files():
""""Get the list of trust stores so they properly packaged when doing a cx_freeze build.
"""
plugin_data_files = []
trust_stores_pem_path = path.join(root_path, 'sslyze', 'plugins', 'utils', 'trust_store', 'pem_files')
for file in listdir(trust_stores_pem_path):
file... | Get the list of trust stores so they properly packaged when doing a cx_freeze build. |
def r2_score(y_true, y_pred):
"""R² for Bayesian regression models. Only valid for linear models.
Parameters
----------
y_true: : array-like of shape = (n_samples) or (n_samples, n_outputs)
Ground truth (correct) target values.
y_pred : array-like of shape = (n_samples) or (n_samples, n_out... | R² for Bayesian regression models. Only valid for linear models.
Parameters
----------
y_true: : array-like of shape = (n_samples) or (n_samples, n_outputs)
Ground truth (correct) target values.
y_pred : array-like of shape = (n_samples) or (n_samples, n_outputs)
Estimated target values... |
def set_args(self, **kwargs):
"""
Set more arguments to self.args
args:
**kwargs:
key and value represents dictionary key and value
"""
try:
kwargs_items = kwargs.iteritems()
except AttributeError:
kwargs_items = kwargs... | Set more arguments to self.args
args:
**kwargs:
key and value represents dictionary key and value |
def post_user_login(sender, request, user, **kwargs):
"""
Create a profile for the user, when missing.
Make sure that all neccessary user groups exist and have the right permissions.
We need that automatism for people not calling the configure tool,
admin rights for admins after the ... | Create a profile for the user, when missing.
Make sure that all neccessary user groups exist and have the right permissions.
We need that automatism for people not calling the configure tool,
admin rights for admins after the first login, and similar cases. |
def contains(this, that, axis=semantics.axis_default):
"""Returns bool for each element of `that`, indicating if it is contained in `this`
Parameters
----------
this : indexable key sequence
sequence of items to test against
that : indexable key sequence
sequence of items to test fo... | Returns bool for each element of `that`, indicating if it is contained in `this`
Parameters
----------
this : indexable key sequence
sequence of items to test against
that : indexable key sequence
sequence of items to test for
Returns
-------
ndarray, [that.size], bool
... |
def available_tcp_port(reactor):
"""
Returns a Deferred firing an available TCP port on localhost.
It does so by listening on port 0; then stopListening and fires the
assigned port number.
"""
endpoint = serverFromString(reactor, 'tcp:0:interface=127.0.0.1')
port = yield endpoint.listen(NoO... | Returns a Deferred firing an available TCP port on localhost.
It does so by listening on port 0; then stopListening and fires the
assigned port number. |
def prepend(self, _, child, name=None):
"""Adds childs to this tag, starting from the first position."""
self._insert(child, prepend=True, name=name)
return self | Adds childs to this tag, starting from the first position. |
def WhereIs(self, prog, path=None, pathext=None, reject=[]):
"""Find prog in the path.
"""
if path is None:
try:
path = self['ENV']['PATH']
except KeyError:
pass
elif SCons.Util.is_String(path):
path = self.subst(path)
... | Find prog in the path. |
def register_hit_type(
self, title, description, reward, duration_hours, keywords, qualifications
):
"""Register HIT Type for this HIT and return the type's ID, which
is required for creating a HIT.
"""
reward = str(reward)
duration_secs = int(datetime.timedelta(hours... | Register HIT Type for this HIT and return the type's ID, which
is required for creating a HIT. |
def advance(parser):
# type: (Parser) -> None
"""Moves the internal parser object to the next lexed token."""
prev_end = parser.token.end
parser.prev_end = prev_end
parser.token = parser.lexer.next_token(prev_end) | Moves the internal parser object to the next lexed token. |
def get_key_auth_cb(key_filepath):
"""This is just a convenience function for key-based login."""
def auth_cb(ssh):
key = ssh_pki_import_privkey_file(key_filepath)
ssh.userauth_publickey(key)
return auth_cb | This is just a convenience function for key-based login. |
def label_components(self, display = None):
'''
API: label_components(self, display=None)
Description:
This method labels the nodes of an undirected graph with component
numbers so that each node has the same label as all nodes in the
same component. It will display the a... | API: label_components(self, display=None)
Description:
This method labels the nodes of an undirected graph with component
numbers so that each node has the same label as all nodes in the
same component. It will display the algortihm if display argument is
provided.
Input:... |
def state():
'''Get The playback state: 'playing', 'paused', or 'stopped'.
If PLAYING or PAUSED, show information on current track.
Calls PlaybackController.get_state(), and if state is PLAYING or PAUSED, get
PlaybackController.get_current_track() and
PlaybackController.get_time_position()'''
... | Get The playback state: 'playing', 'paused', or 'stopped'.
If PLAYING or PAUSED, show information on current track.
Calls PlaybackController.get_state(), and if state is PLAYING or PAUSED, get
PlaybackController.get_current_track() and
PlaybackController.get_time_position() |
def find(self, item, description='', event_type=''):
"""
Find regexp in activitylog
find record as if type are in description.
"""
# TODO: should be refactored, dumb logic
if ': ' in item:
splited = item.split(': ', 1)
if splited[0] in self.TYPES:
... | Find regexp in activitylog
find record as if type are in description. |
def htmlCtxtUseOptions(self, options):
"""Applies the options to the parser context """
ret = libxml2mod.htmlCtxtUseOptions(self._o, options)
return ret | Applies the options to the parser context |
def update_metric(self, metric, labels, pre_sliced=False):
"""Update metric with the current executor."""
self.curr_execgrp.update_metric(metric, labels, pre_sliced) | Update metric with the current executor. |
def send_signal(self, s):
"""
Send a signal to the daemon process.
The signal must have been enabled using the ``signals``
parameter of :py:meth:`Service.__init__`. Otherwise, a
``ValueError`` is raised.
"""
self._get_signal_event(s) # Check if signal has been e... | Send a signal to the daemon process.
The signal must have been enabled using the ``signals``
parameter of :py:meth:`Service.__init__`. Otherwise, a
``ValueError`` is raised. |
def cookie_attr_value_check(attr_name, attr_value):
""" Check cookie attribute value for validity. Return True if value is valid
:param attr_name: attribute name to check
:param attr_value: attribute value to check
:return: bool
"""
attr_value.encode('us-ascii')
return WHTTPCookie.cookie_attr_value_compl... | Check cookie attribute value for validity. Return True if value is valid
:param attr_name: attribute name to check
:param attr_value: attribute value to check
:return: bool |
def referenceLengths(self):
"""
Get the lengths of wanted references.
@raise UnknownReference: If a reference id is not present in the
SAM/BAM file.
@return: A C{dict} of C{str} reference id to C{int} length with a key
for each reference id in C{self.referenceIds... | Get the lengths of wanted references.
@raise UnknownReference: If a reference id is not present in the
SAM/BAM file.
@return: A C{dict} of C{str} reference id to C{int} length with a key
for each reference id in C{self.referenceIds} or for all references
if C{self.re... |
def search(self, CorpNum, JobID, TradeType, TradeUsage, Page, PerPage, Order, UserID=None):
""" 수집 결과 조회
args
CorpNum : 팝빌회원 사업자번호
JobID : 작업아이디
TradeType : 문서형태 배열, N-일반 현금영수증, C-취소 현금영수증
TradeUsage : 거래구분 배열, P-소등공제용, C-지출증빙용
... | 수집 결과 조회
args
CorpNum : 팝빌회원 사업자번호
JobID : 작업아이디
TradeType : 문서형태 배열, N-일반 현금영수증, C-취소 현금영수증
TradeUsage : 거래구분 배열, P-소등공제용, C-지출증빙용
Page : 페이지 번호
PerPage : 페이지당 목록 개수, 최대 1000개
Order : 정렬 방향, D-내림... |
def parse_color(color):
r"""Turns a color into an (r, g, b) tuple
>>> parse_color('white')
(255, 255, 255)
>>> parse_color('#ff0000')
(255, 0, 0)
>>> parse_color('#f00')
(255, 0, 0)
>>> parse_color((255, 0, 0))
(255, 0, 0)
>>> from fabulous import grapefruit
>>> parse_color(... | r"""Turns a color into an (r, g, b) tuple
>>> parse_color('white')
(255, 255, 255)
>>> parse_color('#ff0000')
(255, 0, 0)
>>> parse_color('#f00')
(255, 0, 0)
>>> parse_color((255, 0, 0))
(255, 0, 0)
>>> from fabulous import grapefruit
>>> parse_color(grapefruit.Color((0.0, 1.0, ... |
def paste(xsel=False):
"""Returns system clipboard contents."""
selection = "primary" if xsel else "clipboard"
try:
return subprocess.Popen(["xclip", "-selection", selection, "-o"], stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
except OSError as why:
raise XclipNotFound | Returns system clipboard contents. |
def connect(self, exe_path=None, **kwargs):
"""
直接连接登陆后的客户端
:param exe_path: 客户端路径类似 r'C:\\htzqzyb2\\xiadan.exe', 默认 r'C:\\htzqzyb2\\xiadan.exe'
:return:
"""
connect_path = exe_path or self._config.DEFAULT_EXE_PATH
if connect_path is None:
raise ValueE... | 直接连接登陆后的客户端
:param exe_path: 客户端路径类似 r'C:\\htzqzyb2\\xiadan.exe', 默认 r'C:\\htzqzyb2\\xiadan.exe'
:return: |
def _rm_get_repeat_coords_from_header(parts):
"""
extract the repeat coordinates of a repeat masker match from a header line.
An example header line is::
239 29.42 1.92 0.97 chr1 11 17 (41) C XX#YY (74) 104 1 m_b1s502i1 4
239 29.42 1.92 0.97 chr1 11 17 (41) XX#YY 1 104 (74) m_b1s502i1 4
if the match ... | extract the repeat coordinates of a repeat masker match from a header line.
An example header line is::
239 29.42 1.92 0.97 chr1 11 17 (41) C XX#YY (74) 104 1 m_b1s502i1 4
239 29.42 1.92 0.97 chr1 11 17 (41) XX#YY 1 104 (74) m_b1s502i1 4
if the match is to the reverse complement, the start and end coordi... |
def loads(data, use_datetime=0):
"""data -> unmarshalled data, method name
Convert an XML-RPC packet to unmarshalled data plus a method
name (None if not present).
If the XML-RPC packet represents a fault condition, this function
raises a Fault exception.
"""
p, u = getparser(use_datetime=... | data -> unmarshalled data, method name
Convert an XML-RPC packet to unmarshalled data plus a method
name (None if not present).
If the XML-RPC packet represents a fault condition, this function
raises a Fault exception. |
def to_bitarray(data, width=8):
''' Convert data (list of integers, bytearray or integer) to bitarray '''
if isinstance(data, list) or isinstance(data, bytearray):
data = combine_hex(data)
return [True if digit == '1' else False for digit in bin(data)[2:].zfill(width)] | Convert data (list of integers, bytearray or integer) to bitarray |
def _guess_x_simple(self, y_desired, y_dims=None, **kwargs):
"""Provide an initial guesses for a probable x from y"""
_, indexes = self.fmodel.dataset.nn_y(y_desired, dims=y_dims, k = 10)
return [self.fmodel.get_x(i) for i in indexes] | Provide an initial guesses for a probable x from y |
def _write_csv(self, datasets, filename):
"""
Write CSV
:param datasets: Datasets
:param filename: File Name
"""
with open('/'.join([self.output, filename]), mode='w', encoding=self.encoding) as write_file:
writer = csv.writer(write_file, delimiter=',')
... | Write CSV
:param datasets: Datasets
:param filename: File Name |
def load_from_config(self, config):
"""Load model from passed configuration."""
self.site = config.get("id", False)
self.classification = config.get("class", False)
self.tags = config.get("tags", False)
self._load_key_value(
config.get("key_value_data", False)
... | Load model from passed configuration. |
def pstdev(data, mu=None):
"""Return the square root of the population variance.
See ``pvariance`` for arguments and other details.
"""
var = pvariance(data, mu)
try:
return var.sqrt()
except AttributeError:
return math.sqrt(var) | Return the square root of the population variance.
See ``pvariance`` for arguments and other details. |
def smoothed_hazard_(self, bandwidth):
"""
Parameters
-----------
bandwidth: float
the bandwith used in the Epanechnikov kernel.
Returns
-------
DataFrame:
a DataFrame of the smoothed hazard
"""
timeline = self.timeline
... | Parameters
-----------
bandwidth: float
the bandwith used in the Epanechnikov kernel.
Returns
-------
DataFrame:
a DataFrame of the smoothed hazard |
def add_if_unique(self, name):
"""
Returns ``True`` on success.
Returns ``False`` if the name already exists in the namespace.
"""
with self.lock:
if name not in self.names:
self.names.append(name)
return True
return False | Returns ``True`` on success.
Returns ``False`` if the name already exists in the namespace. |
def send_single_value(self, channel: int, value: int) -> int:
"""
Send a single value to the uDMX
:param channel: DMX channel number, 1-512
:param value: Value to be sent to channel, 0-255
:return: number of bytes actually sent
"""
SetSingleChannel = 1
n =... | Send a single value to the uDMX
:param channel: DMX channel number, 1-512
:param value: Value to be sent to channel, 0-255
:return: number of bytes actually sent |
def get_users(session, query):
"""
Get one or more users
"""
# GET /api/users/0.1/users
response = make_get_request(session, 'users', params_data=query)
json_data = response.json()
if response.status_code == 200:
return json_data['result']
else:
raise UsersNotFoundExcepti... | Get one or more users |
def t_ID(self, t):
r"""[a-zA-Z_][a-zA-Z_0-9]*"""
# If the value is a reserved name, give it the appropriate type (not ID)
if t.value in self.reserved:
t.type = self.reserved[t.value]
# If it's a function, give it the FUNC type
elif t.value in self.functions:
... | r"""[a-zA-Z_][a-zA-Z_0-9]* |
def topil(self, **kwargs):
"""
Get PIL Image.
:return: :py:class:`PIL.Image`, or `None` if the composed image is not
available.
"""
if self.has_preview():
return pil_io.convert_image_data_to_pil(self._record, **kwargs)
return None | Get PIL Image.
:return: :py:class:`PIL.Image`, or `None` if the composed image is not
available. |
def to_julian_date(self):
"""
Convert Datetime Array to float64 ndarray of Julian Dates.
0 Julian date is noon January 1, 4713 BC.
http://en.wikipedia.org/wiki/Julian_day
"""
# http://mysite.verizon.net/aesir_research/date/jdalg2.htm
year = np.asarray(self.year)
... | Convert Datetime Array to float64 ndarray of Julian Dates.
0 Julian date is noon January 1, 4713 BC.
http://en.wikipedia.org/wiki/Julian_day |
def wraps(wrapped,
assigned = WRAPPER_ASSIGNMENTS,
updated = WRAPPER_UPDATES):
"""Decorator factory to apply update_wrapper() to a wrapper function
Returns a decorator that invokes update_wrapper() with the decorated
function as the wrapper argument and the arguments to wraps() as... | Decorator factory to apply update_wrapper() to a wrapper function
Returns a decorator that invokes update_wrapper() with the decorated
function as the wrapper argument and the arguments to wraps() as the
remaining arguments. Default arguments are as for update_wrapper().
This is a convenien... |
def filter_recordings(recordings):
"""Remove all recordings which have points without time.
Parameters
----------
recordings : list of dicts
Each dictionary has the keys 'data' and 'segmentation'
Returns
-------
list of dicts :
Only recordings where all points have time valu... | Remove all recordings which have points without time.
Parameters
----------
recordings : list of dicts
Each dictionary has the keys 'data' and 'segmentation'
Returns
-------
list of dicts :
Only recordings where all points have time values. |
def current_frame(self):
    """Return the frame number corresponding to the current playback time.

    The index is simply ``int(fps * time)``.

    Raises:
        RuntimeError: if the frames-per-second rate has not been set,
            since the frame index cannot be derived without it.
    """
    if self.__fps:
        return int(self.__fps * self.time)
    raise RuntimeError("fps not set so current frame number cannot be"
                       " calculated")
def trim_wav_ms(in_path: Path, out_path: Path,
start_time: int, end_time: int) -> None:
""" Extracts part of a WAV File.
First attempts to call sox. If sox is unavailable, it backs off to
pydub+ffmpeg.
Args:
in_path: A path to the source file to extract a portion of
out... | Extracts part of a WAV File.
First attempts to call sox. If sox is unavailable, it backs off to
pydub+ffmpeg.
Args:
in_path: A path to the source file to extract a portion of
out_path: A path describing the to-be-created WAV file.
start_time: The point in the source WAV file at whi... |
def community_topic_posts(self, id, **kwargs):
    """List the posts belonging to a community topic.

    API reference:
    https://developer.zendesk.com/rest_api/docs/help_center/posts#list-posts
    """
    endpoint = "/api/v2/community/topics/{id}/posts.json".format(id=id)
    return self.call(endpoint, **kwargs)
def addToDB(abbr = None, dbname = manualDBname):
"""Adds _abbr_ to the database of journals. The database is kept separate from the one scraped from WOS, this supersedes it. The database by default is stored with the WOS one and the name is given by `metaknowledge.journalAbbreviations.manualDBname`. To create an em... | Adds _abbr_ to the database of journals. The database is kept separate from the one scraped from WOS, this supersedes it. The database by default is stored with the WOS one and the name is given by `metaknowledge.journalAbbreviations.manualDBname`. To create an empty database run **addToDB** without an _abbr_ argument.... |
def render_diagram(root_task, out_base, max_param_len=20, horizontal=False, colored=False):
"""Render a diagram of the ETL pipeline
All upstream tasks (i.e. requirements) of :attr:`root_task` are rendered.
Nodes are, by default, styled as simple rects. This style is augmented by any
:attr:`diagram_sty... | Render a diagram of the ETL pipeline
All upstream tasks (i.e. requirements) of :attr:`root_task` are rendered.
Nodes are, by default, styled as simple rects. This style is augmented by any
:attr:`diagram_style` attributes of the tasks.
.. note:: This function requires the 'dot' executable from the Gr... |
def _string_find(self, substr, start=None, end=None):
"""
Returns position (0 indexed) of first occurence of substring,
optionally after a particular position (0 indexed)
Parameters
----------
substr : string
start : int, default None
end : int, default None
Not currently implem... | Returns position (0 indexed) of first occurence of substring,
optionally after a particular position (0 indexed)
Parameters
----------
substr : string
start : int, default None
end : int, default None
Not currently implemented
Returns
-------
position : int, 0 indexed |
def compute_checksum(line):
    """Return the modulo-10 TLE checksum of *line*.

    Each digit contributes its value and each '-' counts as 1; every other
    character (letters, '+', '.', spaces) contributes 0. Only the first 68
    characters are considered, which excludes the checksum digit itself in
    a standard 69-character TLE line.
    """
    total = 0
    for ch in line[:68]:
        if ch.isdigit():
            total += int(ch)
        elif ch == '-':
            total += 1
    return total % 10
def maybe_call_closing_deferred(self):
    """Fire the pending closing deferred, if one is set.

    The deferred is called back with this object and then cleared so it
    cannot fire a second time. A no-op when no deferred is pending.
    """
    deferred = self._closing_deferred
    if deferred:
        # Callback first, then clear — mirrors the original ordering so a
        # callback that raises leaves the deferred in place.
        deferred.callback(self)
        self._closing_deferred = None
exists. |
def setModel(self, model):
    """ Sets the model.

        Checks that the model is a BaseTreeModel (raises otherwise via
        check_class) before delegating to the Qt base-class setModel.
    """
    check_class(model, BaseTreeModel)
    super(ArgosTreeView, self).setModel(model)
Checks that the model is a |
def trans_new(name, transform, inverse, breaks=None,
minor_breaks=None, _format=None,
domain=(-np.inf, np.inf), doc='', **kwargs):
"""
Create a transformation class object
Parameters
----------
name : str
Name of the transformation
transform : callable ``f(x)... | Create a transformation class object
Parameters
----------
name : str
Name of the transformation
transform : callable ``f(x)``
A function (preferably a `ufunc`) that computes
the transformation.
inverse : callable ``f(x)``
A function (preferably a `ufunc`) that compu... |
def UrnStringToHuntId(urn):
  """Extracts the hunt id component from a hunt URN string."""
  # Drop the AFF4 scheme prefix if present; what remains must be
  # exactly "hunts/<hunt_id>".
  if urn.startswith(AFF4_PREFIX):
    urn = urn[len(AFF4_PREFIX):]
  parts = urn.split("/")
  if len(parts) != 2 or parts[0] != "hunts":
    raise ValueError("Invalid hunt URN: %s" % urn)
  return parts[1]
def lattice(self, lattice):
    """Set the Lattice associated with this PeriodicSite.

    The cached cartesian coordinates are recomputed from the stored
    fractional coordinates so they stay consistent with the new lattice.
    """
    self._lattice = lattice
    self._coords = lattice.get_cartesian_coords(self._frac_coords)
def decode_conjure_bean_type(cls, obj, conjure_type):
"""Decodes json into a conjure bean type (a plain bean, not enum
or union).
Args:
obj: the json object to decode
conjure_type: a class object which is the bean type
we're decoding into
Returns:... | Decodes json into a conjure bean type (a plain bean, not enum
or union).
Args:
obj: the json object to decode
conjure_type: a class object which is the bean type
we're decoding into
Returns:
A instance of a bean of type conjure_type. |
def make_rawr_zip_payload(rawr_tile, date_time=None):
"""make a zip file from the rawr tile formatted data"""
if date_time is None:
date_time = gmtime()[0:6]
buf = StringIO()
with zipfile.ZipFile(buf, mode='w') as z:
for fmt_data in rawr_tile.all_formatted_data:
zip_info = z... | make a zip file from the rawr tile formatted data |
def toc(self):
"""End collecting for current batch and return results.
Call after computation of current batch.
Returns
-------
res : list of """
if not self.activated:
return []
for exe in self.exes:
for array in exe.arg_arrays:
... | End collecting for current batch and return results.
Call after computation of current batch.
Returns
-------
res : list of |
def add_content(self, content, mime_type=None):
"""Add content to the email
:param contents: Content to be added to the email
:type contents: Content
:param mime_type: Override the mime type
:type mime_type: MimeType, str
"""
if isinstance(content, str):
... | Add content to the email
:param contents: Content to be added to the email
:type contents: Content
:param mime_type: Override the mime type
:type mime_type: MimeType, str |
def lost_dimensions(point_fmt_in, point_fmt_out):
""" Returns a list of the names of the dimensions that will be lost
when converting from point_fmt_in to point_fmt_out
"""
unpacked_dims_in = PointFormat(point_fmt_in).dtype
unpacked_dims_out = PointFormat(point_fmt_out).dtype
out_dims = unpac... | Returns a list of the names of the dimensions that will be lost
when converting from point_fmt_in to point_fmt_out |
def rbd_exists(service, pool, rbd_img):
"""Check to see if a RADOS block device exists."""
try:
out = check_output(['rbd', 'list', '--id',
service, '--pool', pool])
if six.PY3:
out = out.decode('UTF-8')
except CalledProcessError:
return False
... | Check to see if a RADOS block device exists. |
def requires_to_requires_dist(requirement):
    """Compose the version predicates for requirement in PEP 345 fashion.

    :param requirement: a parsed requirement exposing ``specs`` as an
        iterable of ``(operator, version)`` pairs.
    :return: a string like ``" (>=1.0,<2.0)"`` (leading space included),
        or ``''`` when the requirement carries no version specifiers.
    """
    # Comprehension replaces the manual append loop (same order, same output).
    requires_dist = [op + ver for op, ver in requirement.specs]
    if not requires_dist:
        return ''
    return " (%s)" % ','.join(requires_dist)
async def _queue(self, ctx, page: int = 1):
""" Shows the player's queue. """
player = self.bot.lavalink.players.get(ctx.guild.id)
if not player.queue:
return await ctx.send('There\'s nothing in the queue! Why not queue something?')
items_per_page = 10
pages... | Shows the player's queue. |
def pcmd(host, seq, progressive, lr, fb, vv, va):
"""
Makes the drone move (translate/rotate).
Parameters:
seq -- sequence number
progressive -- True: enable progressive commands, False: disable (i.e.
enable hovering mode)
lr -- left-right tilt: float [-1..1] negative: left, positive: r... | Makes the drone move (translate/rotate).
Parameters:
seq -- sequence number
progressive -- True: enable progressive commands, False: disable (i.e.
enable hovering mode)
lr -- left-right tilt: float [-1..1] negative: left, positive: right
rb -- front-back tilt: float [-1..1] negative: forwar... |
def dump_misspelling_list(self):
    """Return a list of ``[misspelled_word, correction]`` pairs.

    Words are emitted in sorted order; a word with several candidate
    corrections yields one pair per correction, preserving their stored
    order.
    """
    # Nested comprehension replaces the manual double append loop.
    return [[bad_word, correction]
            for bad_word in sorted(self._misspelling_dict)
            for correction in self._misspelling_dict[bad_word]]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.