code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def from_dict(self,
dingos_obj_dict,
config_hooks=None,
namespace_dict=None,
):
"""
Convert DingoObjDict to facts and associate resulting facts with this information object.
"""
# Instantiate default parameters
... | Convert DingoObjDict to facts and associate resulting facts with this information object. |
def udf(x):
"""
No-op routine for with an argument signature matching udfuns.
Allways returns 0.0 .
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/udf_c.html
:param x: Double precision value, unused.
:type x: float
:return: Double precision value, unused.
:rtype: float
""... | No-op routine for with an argument signature matching udfuns.
Allways returns 0.0 .
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/udf_c.html
:param x: Double precision value, unused.
:type x: float
:return: Double precision value, unused.
:rtype: float |
def _startServiceJobs(self):
"""Start any service jobs available from the service manager"""
self.issueQueingServiceJobs()
while True:
serviceJob = self.serviceManager.getServiceJobsToStart(0)
# Stop trying to get jobs when function returns None
if serviceJob ... | Start any service jobs available from the service manager |
def _hash_data(hasher, data):
"""Generate hash of data using provided hash type.
:param hasher: Hasher instance to use as a base for calculating hash
:type hasher: cryptography.hazmat.primitives.hashes.Hash
:param bytes data: Data to sign
:returns: Hash of data
:rtype: bytes
"""
_hasher... | Generate hash of data using provided hash type.
:param hasher: Hasher instance to use as a base for calculating hash
:type hasher: cryptography.hazmat.primitives.hashes.Hash
:param bytes data: Data to sign
:returns: Hash of data
:rtype: bytes |
def create_node(participant_id):
"""Send a POST request to the node table.
This makes a new node for the participant, it calls:
1. exp.get_network_for_participant
2. exp.create_node
3. exp.add_node_to_network
4. exp.node_post_request
"""
exp = Experiment(session)
# ... | Send a POST request to the node table.
This makes a new node for the participant, it calls:
1. exp.get_network_for_participant
2. exp.create_node
3. exp.add_node_to_network
4. exp.node_post_request |
def dcmdottoang_vel(R,Rdot):
"""Convert a rotation matrix to angular velocity
w - angular velocity in inertial frame
Omega - angular velocity in body frame
"""
w = vee_map(Rdot.dot(R.T))
Omega = vee_map(R.T.dot(Rdot))
return (w, Omega) | Convert a rotation matrix to angular velocity
w - angular velocity in inertial frame
Omega - angular velocity in body frame |
def _dict_to_stanza(key, stanza):
'''
Convert a dict to a multi-line stanza
'''
ret = ''
for skey in stanza:
if stanza[skey] is True:
stanza[skey] = ''
ret += ' {0} {1}\n'.format(skey, stanza[skey])
return '{0} {{\n{1}}}'.format(key, ret) | Convert a dict to a multi-line stanza |
def temp_copy_extracted_submission(self):
"""Creates a temporary copy of extracted submission.
When executed, submission is allowed to modify it's own directory. So
to ensure that submission does not pass any data between runs, new
copy of the submission is made before each run. After a run temporary c... | Creates a temporary copy of extracted submission.
When executed, submission is allowed to modify it's own directory. So
to ensure that submission does not pass any data between runs, new
copy of the submission is made before each run. After a run temporary copy
of submission is deleted.
Returns:
... |
def create_searchspace(lookup, fastafn, proline_cut=False,
reverse_seqs=True, do_trypsinize=True):
"""Given a FASTA database, proteins are trypsinized and resulting peptides
stored in a database or dict for lookups"""
allpeps = []
for record in SeqIO.parse(fastafn, 'fasta'):
... | Given a FASTA database, proteins are trypsinized and resulting peptides
stored in a database or dict for lookups |
def parse_changes():
""" grab version from CHANGES and validate entry """
with open('CHANGES') as changes:
for match in re.finditer(RE_CHANGES, changes.read(1024), re.M):
if len(match.group(1)) != len(match.group(3)):
error('incorrect underline in CHANGES')
date... | grab version from CHANGES and validate entry |
def match_qualifier_id(self, qualifier_id, match):
"""Matches the qualifier identified by the given ``Id``.
arg: qualifier_id (osid.id.Id): the Id of the ``Qualifier``
arg: match (boolean): ``true`` if a positive match, ``false``
for a negative match
raise: NullAr... | Matches the qualifier identified by the given ``Id``.
arg: qualifier_id (osid.id.Id): the Id of the ``Qualifier``
arg: match (boolean): ``true`` if a positive match, ``false``
for a negative match
raise: NullArgument - ``qualifier_id`` is ``null``
*compliance: man... |
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they're passed to the form Field's constructor.
"""
formfield = super().formfield_for_dbfield(db_field, **kwargs... | Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they're passed to the form Field's constructor. |
def columns(self):
"""获取用户专栏.
:return: 用户专栏,返回生成器
:rtype: Column.Iterable
"""
from .column import Column
if self.url is None or self.post_num == 0:
return
soup = BeautifulSoup(self._session.get(self.url + 'posts').text)
column_list = soup.fin... | 获取用户专栏.
:return: 用户专栏,返回生成器
:rtype: Column.Iterable |
def run(self):
""" Plan:
* We read into a fresh instance of IO obj until marker encountered.
* When marker is detected, we attach that IO obj to "results" array
and signal the calling code (through threading.Event flag) that
results are available
* repeat until .stop... | Plan:
* We read into a fresh instance of IO obj until marker encountered.
* When marker is detected, we attach that IO obj to "results" array
and signal the calling code (through threading.Event flag) that
results are available
* repeat until .stop() was called on the thread... |
def get_option(self, key, default=None):
"""Return option from synchronizer (possibly overridden by target extra_opts)."""
if self.synchronizer:
return self.extra_opts.get(key, self.synchronizer.options.get(key, default))
return self.extra_opts.get(key, default) | Return option from synchronizer (possibly overridden by target extra_opts). |
def summary_df_from_list(results_list, names, **kwargs):
"""Make a panda data frame of the mean and std devs of each element of a
list of 1d arrays, including the uncertainties on the values.
This just converts the array to a DataFrame and calls summary_df on it.
Parameters
----------
results_... | Make a panda data frame of the mean and std devs of each element of a
list of 1d arrays, including the uncertainties on the values.
This just converts the array to a DataFrame and calls summary_df on it.
Parameters
----------
results_list: list of 1d numpy arrays
Must have same length as n... |
def view_admin_log():
"""Page for viewing the log of admin activity."""
build = g.build
# TODO: Add paging
log_list = (
models.AdminLog.query
.filter_by(build_id=build.id)
.order_by(models.AdminLog.created.desc())
.all())
return render_template(
'view_admin... | Page for viewing the log of admin activity. |
def ticket_flag(self, which, new=None):
"""
Get or set a ticket flag.
'which' can be either a string ('APPEND_CR' etc.), or an integer.
You should ALWAYS use a string, unless you really know what you are doing.
"""
flag = _get_flag(which, TicketFlags)
if flag:
... | Get or set a ticket flag.
'which' can be either a string ('APPEND_CR' etc.), or an integer.
You should ALWAYS use a string, unless you really know what you are doing. |
def fix_jumps(self, row_selected, delta):
'''fix up jumps when we add/remove rows'''
numrows = self.grid_mission.GetNumberRows()
for row in range(numrows):
command = self.grid_mission.GetCellValue(row, ME_COMMAND_COL)
if command in ["DO_JUMP", "DO_CONDITION_JUMP"]:
... | fix up jumps when we add/remove rows |
def run_pyvcf(args):
"""Main program entry point after parsing arguments"""
# open VCF reader
reader = vcf.Reader(filename=args.input_vcf)
# optionally, open VCF writer
writer = None
# read through input VCF file, optionally also writing out
start = time.clock()
num = 0
for num, r in... | Main program entry point after parsing arguments |
def _acronym_lic(self, license_statement):
"""Convert license acronym."""
pat = re.compile(r'\(([\w+\W?\s?]+)\)')
if pat.search(license_statement):
lic = pat.search(license_statement).group(1)
if lic.startswith('CNRI'):
acronym_licence = lic[:4]
... | Convert license acronym. |
def define_points_grid(self):
"""
This is experimental code that could be used in the spatialDomainNoGrid
section to build a grid of points on which to generate the solution.
However, the current development plan (as of 27 Jan 2015) is to have the
end user supply the list of points where they want ... | This is experimental code that could be used in the spatialDomainNoGrid
section to build a grid of points on which to generate the solution.
However, the current development plan (as of 27 Jan 2015) is to have the
end user supply the list of points where they want a solution (and/or for
it to be provi... |
def activate(self):
"""
Activate an plan in a CREATED state.
"""
obj = self.find_paypal_object()
if obj.state == enums.BillingPlanState.CREATED:
success = obj.activate()
if not success:
raise PaypalApiError("Failed to activate plan: %r" % (obj.error))
# Resync the updated data to the database
se... | Activate an plan in a CREATED state. |
def delete(self, pk, **kwargs):
"""
Delete the object by primary_key:
.. code-block:: python
DBSession.sacrud(Users).delete(1)
DBSession.sacrud(Users).delete('1')
DBSession.sacrud(User2Groups).delete({'user_id': 4, 'group_id': 2})
JSON support:
... | Delete the object by primary_key:
.. code-block:: python
DBSession.sacrud(Users).delete(1)
DBSession.sacrud(Users).delete('1')
DBSession.sacrud(User2Groups).delete({'user_id': 4, 'group_id': 2})
JSON support:
.. code-block:: python
DBSession.s... |
def num_workers(self):
"""Returns the number of worker nodes.
Returns
-------
size :int
The number of worker nodes.
"""
size = ctypes.c_int()
check_call(_LIB.MXKVStoreGetGroupSize(self.handle, ctypes.byref(size)))
return size.value | Returns the number of worker nodes.
Returns
-------
size :int
The number of worker nodes. |
def create_col_nums():
"""Return column numbers and letters that repeat up to NUM_REPEATS.
I.e., NUM_REPEATS = 2 would return a list of 26 * 26 = 676 2-tuples.
"""
NUM_REPEATS = 2
column_letters = list(
string.ascii_uppercase
) + map(
''.join,
itertools.product(
... | Return column numbers and letters that repeat up to NUM_REPEATS.
I.e., NUM_REPEATS = 2 would return a list of 26 * 26 = 676 2-tuples. |
def xy_data(xdata, ydata, eydata=None, exdata=None, label=None, xlabel='', ylabel='', \
title='', shell_history=0, xshift=0, yshift=0, xshift_every=1, yshift_every=1, \
coarsen=0, style=None, clear=True, axes=None, xscale='linear', yscale='linear', grid=False, \
... | Plots specified data.
Parameters
----------
xdata, ydata
Arrays (or arrays of arrays) of data to plot
eydata=None, exdata=None
Arrays of x and y errorbar values
label=None
String or array of strings for the line labels
xlabel=''
L... |
def msg(self, msg=None, ret_r=False):
'''code's message'''
if msg or ret_r:
self._msg = msg
return self
return self._msg | code's message |
def verify_server_core(timeout=120, start_delay=90):
''' checks to see if the server_core is running
args:
delay: will cycle till core is up.
timeout: number of seconds to wait
'''
timestamp = time.time()
last_check = time.time() + start_delay - 10
last_delay_notific... | checks to see if the server_core is running
args:
delay: will cycle till core is up.
timeout: number of seconds to wait |
def process_satellites(self, helper, sess):
"""
check and show the good satellites
"""
good_satellites = helper.get_snmp_value(sess, helper, self.oids['oid_gps_satellites_good'])
# Show the summary and add the metric and afterwards check the metric
helper.add_summary("Go... | check and show the good satellites |
def get_rank(self, member, reverse=False, pipe=None):
"""
Return the rank of *member* in the collection.
By default, the member with the lowest score has rank 0.
If *reverse* is ``True``, the member with the highest score has rank 0.
"""
pipe = self.redis if pipe is None ... | Return the rank of *member* in the collection.
By default, the member with the lowest score has rank 0.
If *reverse* is ``True``, the member with the highest score has rank 0. |
def designPrimers(p3_args, input_log=None, output_log=None, err_log=None):
''' Return the raw primer3_core output for the provided primer3 args.
Returns an ordered dict of the boulderIO-format primer3 output file
'''
sp = subprocess.Popen([pjoin(PRIMER3_HOME, 'primer3_core')],
... | Return the raw primer3_core output for the provided primer3 args.
Returns an ordered dict of the boulderIO-format primer3 output file |
def _start_repl(api):
# type: (Iota) -> None
"""
Starts the REPL.
"""
banner = (
'IOTA API client for {uri} ({testnet}) '
'initialized as variable `api`.\n'
'Type `help(api)` for list of API commands.'.format(
testnet='testnet' ... | Starts the REPL. |
def _spec_to_matches(server_list, server_spec, mode):
"""
mode is in {uri, hostname, hostname_port}
A list of matching server docs.
Should usually be 0 or 1 matches. Multiple matches are possible though.
"""
assert mode in ("uri", "hostname", "hostname_port")
def match(server_doc):
... | mode is in {uri, hostname, hostname_port}
A list of matching server docs.
Should usually be 0 or 1 matches. Multiple matches are possible though. |
def AddInformationalOptions(self, argument_group):
"""Adds the informational options to the argument group.
Args:
argument_group (argparse._ArgumentGroup): argparse argument group.
"""
argument_group.add_argument(
'-d', '--debug', dest='debug', action='store_true', default=False,
... | Adds the informational options to the argument group.
Args:
argument_group (argparse._ArgumentGroup): argparse argument group. |
def _canceling_task(self, backend):
"""
Used internally to decrement `backend`s current and total task counts
when `backend` could not be reached.
"""
with self.backend_mutex:
self.backends[backend] -= 1
self.task_counter[backend] -= 1 | Used internally to decrement `backend`s current and total task counts
when `backend` could not be reached. |
def _from_p(self, mode):
"""Convert the image from P or PA to RGB or RGBA."""
self._check_modes(("P", "PA"))
if not self.palette:
raise RuntimeError("Can't convert palettized image, missing palette.")
pal = np.array(self.palette)
pal = da.from_array(pal, chunks=pal.s... | Convert the image from P or PA to RGB or RGBA. |
def get_tuning(instrument, description, nr_of_strings=None, nr_of_courses=None):
"""Get the first tuning that satisfies the constraints.
The instrument and description arguments are treated like
case-insensitive prefixes. So search for 'bass' is the same is
'Bass Guitar'.
Example:
>>> tunings.... | Get the first tuning that satisfies the constraints.
The instrument and description arguments are treated like
case-insensitive prefixes. So search for 'bass' is the same is
'Bass Guitar'.
Example:
>>> tunings.get_tuning('guitar', 'standard')
<tunings.StringTuning instance at 0x139ac20> |
def can_proceed(bound_method, check_conditions=True):
"""
Returns True if model in state allows to call bound_method
Set ``check_conditions`` argument to ``False`` to skip checking
conditions.
"""
if not hasattr(bound_method, '_django_fsm'):
im_func = getattr(bound_method, 'im_func', ge... | Returns True if model in state allows to call bound_method
Set ``check_conditions`` argument to ``False`` to skip checking
conditions. |
def decrease_reads_in_units(
current_provisioning, units, min_provisioned_reads, log_tag):
""" Decrease the current_provisioning with units units
:type current_provisioning: int
:param current_provisioning: The current provisioning
:type units: int
:param units: How many units should we dec... | Decrease the current_provisioning with units units
:type current_provisioning: int
:param current_provisioning: The current provisioning
:type units: int
:param units: How many units should we decrease with
:returns: int -- New provisioning value
:type min_provisioned_reads: int
:param min_... |
def setMetadata(self, remote, address, key, value):
"""Set metadata of device"""
try:
return self.proxies["%s-%s" % (self._interface_id, remote)].setMetadata(address, key, value)
except Exception as err:
LOG.debug("ServerThread.setMetadata: Exception: %s" % str(err)) | Set metadata of device |
def set_content_model(self):
"""
Set content_model to the child class's related name, or None if this is
the base class.
"""
if not self.content_model:
is_base_class = (
base_concrete_model(ContentTyped, self) == self.__class__)
self.conten... | Set content_model to the child class's related name, or None if this is
the base class. |
def _set_keychain(self, v, load=False):
"""
Setter method for keychain, mapped from YANG variable /keychain (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_keychain is considered as a private
method. Backends looking to populate this variable should
do so ... | Setter method for keychain, mapped from YANG variable /keychain (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_keychain is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_keychain() directly. |
def class_check_para(**kw):
"""
force check accept and return,
decorator, @class_check_para(accept=, returns=, mail=)
:param kw:
:return:
"""
try:
def decorator(f):
def new_f(*args):
if "accepts" in kw:
assert len(args) == len(kw["accep... | force check accept and return,
decorator, @class_check_para(accept=, returns=, mail=)
:param kw:
:return: |
def from_buffer(self, buf):
"""
Identify the contents of `buf`
"""
with self.lock:
try:
# if we're on python3, convert buf to bytes
# otherwise this string is passed as wchar*
# which is not what libmagic expects
... | Identify the contents of `buf` |
def set_app_os_tag(self, os_tag, app_tag, update_os, update_app):
"""Update the app and/or os tags."""
update_os = bool(update_os)
update_app = bool(update_app)
if update_os:
self.os_info = _unpack_version(os_tag)
if update_app:
self.app_info = _unpack_... | Update the app and/or os tags. |
def convert_language_code(django_lang):
"""
Converts Django language codes "ll-cc" into ISO codes "ll_CC" or "ll"
:param django_lang: Django language code as ll-cc
:type django_lang: str
:return: ISO language code as ll_CC
:rtype: str
"""
lang_and_country = django_lang.split('-')
tr... | Converts Django language codes "ll-cc" into ISO codes "ll_CC" or "ll"
:param django_lang: Django language code as ll-cc
:type django_lang: str
:return: ISO language code as ll_CC
:rtype: str |
def handle_market_close(self, dt, data_portal):
"""Handles the close of the given day.
Parameters
----------
dt : Timestamp
The most recently completed simulation datetime.
data_portal : DataPortal
The current data portal.
Returns
-------... | Handles the close of the given day.
Parameters
----------
dt : Timestamp
The most recently completed simulation datetime.
data_portal : DataPortal
The current data portal.
Returns
-------
A daily perf packet. |
def _centroids(n_clusters: int, points: List[List[float]]) -> List[List[float]]:
""" Return n_clusters centroids of points
"""
k_means = KMeans(n_clusters=n_clusters)
k_means.fit(points)
closest, _ = pairwise_distances_argmin_min(k_means.cluster_centers_, points)
return list(map(list, np.arra... | Return n_clusters centroids of points |
def ls(args):
"""
lexibank ls [COLS]+
column specification:
- license
- lexemes
- macroareas
"""
db = Database(args.db)
db.create(exists_ok=True)
in_db = {r[0]: r[1] for r in db.fetchall('select id, version from dataset')}
# FIXME: how to smartly choose columns?
table = ... | lexibank ls [COLS]+
column specification:
- license
- lexemes
- macroareas |
def find_le(a, x):
"""Find rightmost value less than or equal to x."""
i = bs.bisect_right(a, x)
if i: return i - 1
raise ValueError | Find rightmost value less than or equal to x. |
def solveConsMarkov(solution_next,IncomeDstn,LivPrb,DiscFac,CRRA,Rfree,PermGroFac,
MrkvArray,BoroCnstArt,aXtraGrid,vFuncBool,CubicBool):
'''
Solves a single period consumption-saving problem with risky income and
stochastic transitions between discrete states, in a Markov fa... | Solves a single period consumption-saving problem with risky income and
stochastic transitions between discrete states, in a Markov fashion. Has
identical inputs as solveConsIndShock, except for a discrete
Markov transitionrule MrkvArray. Markov states can differ in their interest
factor, permanent gr... |
def render(self, request, **kwargs):
"""
Renders this view. Adds cancel_url to the context.
If the request get parameters contains 'popup' then
the `render_type` is set to 'popup'.
"""
if request.GET.get('popup'):
self.render_type = 'popup'
kwargs[... | Renders this view. Adds cancel_url to the context.
If the request get parameters contains 'popup' then
the `render_type` is set to 'popup'. |
def _get_value(obj, key):
"""Get a value for 'key' from 'obj', if possible"""
if isinstance(obj, (list, tuple)):
for item in obj:
v = _find_value(key, item)
if v is not None:
return v
return None
if isinstance(obj, dict):
return obj.get(key)
... | Get a value for 'key' from 'obj', if possible |
def do_connect(self, arg):
''' Connect to the arm. '''
if self.arm.is_connected():
print(self.style.error('Error: ', 'Arm is already connected.'))
else:
try:
port = self.arm.connect()
print(self.style.success('Success: ',
... | Connect to the arm. |
def progress_bar_wrapper(iterable, **kwargs):
''' Wrapper that applies tqdm progress bar conditional on config settings.
'''
return tqdm(iterable, **kwargs) if (config.get_option('progress_bar')
and not isinstance(iterable, tqdm)) else iterable | Wrapper that applies tqdm progress bar conditional on config settings. |
def export(self, elec_file):
"""Export channel name and location to file.
Parameters
----------
elec_file : Path or str
path to file where to save csv
"""
elec_file = Path(elec_file)
if elec_file.suffix == '.csv':
sep = ', '
elif e... | Export channel name and location to file.
Parameters
----------
elec_file : Path or str
path to file where to save csv |
def setup_users_signals(self, ):
"""Setup the signals for the users page
:returns: None
:rtype: None
:raises: None
"""
log.debug("Setting up users page signals.")
self.users_user_view_pb.clicked.connect(self.users_view_user)
self.users_user_create_pb.clic... | Setup the signals for the users page
:returns: None
:rtype: None
:raises: None |
def set_terminal_converted(self, attr, repr_value):
"""
Converts the given representation value and sets the specified
attribute value to the converted value.
:param attr: Attribute to set.
:param str repr_value: String value of the attribute to set.
"""
value = ... | Converts the given representation value and sets the specified
attribute value to the converted value.
:param attr: Attribute to set.
:param str repr_value: String value of the attribute to set. |
def parseline(line,format):
"""\
Given a line (a string actually) and a short string telling
how to format it, return a list of python objects that result.
The format string maps words (as split by line.split()) into
python code:
x -> Nothing; skip this word
s -> Return this word ... | \
Given a line (a string actually) and a short string telling
how to format it, return a list of python objects that result.
The format string maps words (as split by line.split()) into
python code:
x -> Nothing; skip this word
s -> Return this word as a string
i -> Return th... |
def free_symbols(self):
"""Set of free SymPy symbols contained within the expression."""
if self._free_symbols is None:
if len(self._vals) == 0:
self._free_symbols = self.operand.free_symbols
else:
dummy_map = {}
for sym in self._va... | Set of free SymPy symbols contained within the expression. |
def delete_communication_channel_id(self, id, user_id):
"""
Delete a communication channel.
Delete an existing communication channel.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - user_id
"""ID"""
path["user_id"] = u... | Delete a communication channel.
Delete an existing communication channel. |
def string_to_sign(self, http_request):
"""
Return the canonical StringToSign as well as a dict
containing the original version of all headers that
were included in the StringToSign.
"""
headers_to_sign = self.headers_to_sign(http_request)
canonical_headers = self... | Return the canonical StringToSign as well as a dict
containing the original version of all headers that
were included in the StringToSign. |
def config_string_to_dict(string, result=None):
"""
Convert a given configuration string ::
key_1=value_1|key_2=value_2|...|key_n=value_n
into the corresponding dictionary ::
dictionary[key_1] = value_1
dictionary[key_2] = value_2
...
dictionary[key_n] = value_n
... | Convert a given configuration string ::
key_1=value_1|key_2=value_2|...|key_n=value_n
into the corresponding dictionary ::
dictionary[key_1] = value_1
dictionary[key_2] = value_2
...
dictionary[key_n] = value_n
:param string string: the configuration string
:rtype... |
def captcha_transmit(self, captcha, uuid):
"""Delayed transmission of a requested captcha"""
self.log('Transmitting captcha')
response = {
'component': 'hfos.enrol.enrolmanager',
'action': 'captcha',
'data': b64encode(captcha['image'].getvalue()).decode('utf... | Delayed transmission of a requested captcha |
def hide(self, eid, index=0):
"""
Hide the element with the matching eid. If no match, look for an element with a matching rid.
"""
elems = None
if eid in self.__element_ids:
elems = self.__element_ids[eid]
elif eid in self.__repeat_ids:
elems = se... | Hide the element with the matching eid. If no match, look for an element with a matching rid. |
def list_recent_networks(self) -> List[Network]:
"""List the most recently created version of each network (by name)."""
most_recent_times = (
self.session
.query(
Network.name.label('network_name'),
func.max(Network.created).label('max_created')
... | List the most recently created version of each network (by name). |
def create(**data):
"""
Create a customer.
:param data: data required to create the customer
:return: The customer resource
:rtype resources.Customer
"""
http_client = HttpClient()
response, _ = http_client.post(routes.url(routes.CUSTOMER_RESOURCE), data... | Create a customer.
:param data: data required to create the customer
:return: The customer resource
:rtype resources.Customer |
def get_filename(key, message, default=None, history=None):
"""
Like :meth:`prompt`, but only accepts the name of an existing file
as an input.
:type key: str
:param key: The key under which to store the input in the :class:`InputHistory`.
:type message: str
:param message: The user promp... | Like :meth:`prompt`, but only accepts the name of an existing file
as an input.
:type key: str
:param key: The key under which to store the input in the :class:`InputHistory`.
:type message: str
:param message: The user prompt.
:type default: str|None
:param default: The offered default ... |
def show_in_external_file_explorer(fnames=None):
"""Show files in external file explorer
Args:
fnames (list): Names of files to show.
"""
if not isinstance(fnames, (tuple, list)):
fnames = [fnames]
for fname in fnames:
open_file_in_external_explorer(fname) | Show files in external file explorer
Args:
fnames (list): Names of files to show. |
def log_once(log_func, msg, *args, **kwargs):
""""Logs a message only once."""
if msg not in _LOG_ONCE_SEEN:
log_func(msg, *args, **kwargs)
# Key on the message, ignoring args. This should fit most use cases.
_LOG_ONCE_SEEN.add(msg) | Logs a message only once. |
def close(self):
"""Closes the connection."""
if not self._closed:
self._closed = True
self.client.close() | Closes the connection. |
def search(query=None, catalog=None):
"""Search
"""
if query is None:
query = make_query(catalog)
if query is None:
return []
return api.search(query, catalog=catalog) | Search |
def cross_list(*sequences):
"""
From: http://book.opensourceproject.org.cn/lamp/python/pythoncook2/opensource/0596007973/pythoncook2-chp-19-sect-9.html
"""
result = [[ ]]
for seq in sequences:
result = [sublist+[item] for sublist in result for item in seq]
return result | From: http://book.opensourceproject.org.cn/lamp/python/pythoncook2/opensource/0596007973/pythoncook2-chp-19-sect-9.html |
def play_game(game, *players):
"""Play an n-person, move-alternating game.
>>> play_game(Fig52Game(), alphabeta_player, alphabeta_player)
3
"""
state = game.initial
while True:
for player in players:
move = player(game, state)
state = game.result(state, move)
... | Play an n-person, move-alternating game.
>>> play_game(Fig52Game(), alphabeta_player, alphabeta_player)
3 |
def _get_pltdotstrs(self, hdrgos_usr, **kws):
"""Plot GO DAGs for each group found under a specfied header GO."""
import datetime
import timeit
dotstrs_all = []
tic = timeit.default_timer()
# Loop through GO groups. Each group of GOs is formed under a single "header GO"
... | Plot GO DAGs for each group found under a specfied header GO. |
def add_to_batch(self, batch):
'''
Adds paths to the given batch object. They are all added as
GL_TRIANGLES, so the batch will aggregate them all into a single OpenGL
primitive.
'''
for name in self.paths:
svg_path = self.paths[name]
svg_path.add_t... | Adds paths to the given batch object. They are all added as
GL_TRIANGLES, so the batch will aggregate them all into a single OpenGL
primitive. |
def get_connections(self, id, connection_name, **args):
"""Fetches the connections for given object."""
return self.request(
"{0}/{1}/{2}".format(self.version, id, connection_name), args
) | Fetches the connections for given object. |
def stop_capture(self, port_number):
"""
Stops a packet capture.
:param port_number: allocated port number
"""
if not [port["port_number"] for port in self._ports_mapping if port_number == port["port_number"]]:
raise NodeError("Port {port_number} doesn't exist on cl... | Stops a packet capture.
:param port_number: allocated port number |
def modsplit(s):
"""Split importable"""
if ':' in s:
c = s.split(':')
if len(c) != 2:
raise ValueError("Syntax error: {s}")
return c[0], c[1]
else:
c = s.split('.')
if len(c) < 2:
raise ValueError("Syntax error: {s}")
return '.'.join(c[... | Split importable |
def p_obs(self, obs, out=None):
"""
Returns the output probabilities for an entire trajectory and all hidden states
Parameters
----------
obs : ndarray((T), dtype=int)
a discrete trajectory of length T
Return
------
p_o : ndarray (T,N)
... | Returns the output probabilities for an entire trajectory and all hidden states
Parameters
----------
obs : ndarray((T), dtype=int)
a discrete trajectory of length T
Return
------
p_o : ndarray (T,N)
the probability of generating the symbol at ti... |
def softmax(self, params):
'''
Run the softmax selection strategy.
Parameters
----------
Params : dict
Tau
Returns
-------
int
Index of chosen bandit
'''
default_tau = 0.1
if params and type(params) == di... | Run the softmax selection strategy.
Parameters
----------
Params : dict
Tau
Returns
-------
int
Index of chosen bandit |
def sample(self):
"""
Compute new samples.
"""
self._sampling = True
try:
if self.is_raw_perf_class and not self._previous_sample:
self._current_sample = self._query()
self._previous_sample = self._current_sample
self._current... | Compute new samples. |
def format_records(records):
"""Serialise multiple records"""
formatted = list()
for record_ in records:
formatted.append(format_record(record_))
return formatted | Serialise multiple records |
def get_typ(self, refobj):
"""Return the entity type of the given reftrack node
See: :data:`MayaRefobjInterface.types`.
:param refobj: the reftrack node to query
:type refobj: str
:returns: the entity type
:rtype: str
:raises: ValueError
"""
enum... | Return the entity type of the given reftrack node
See: :data:`MayaRefobjInterface.types`.
:param refobj: the reftrack node to query
:type refobj: str
:returns: the entity type
:rtype: str
:raises: ValueError |
def asciigraph(self, values=None, max_height=None, max_width=None, label=False):
'''
Accepts a list of y values and returns an ascii graph
Optionally values can also be a dictionary with a key of timestamp, and a value of value. InGraphs returns data in this format for example.
'''
... | Accepts a list of y values and returns an ascii graph
Optionally values can also be a dictionary with a key of timestamp, and a value of value. InGraphs returns data in this format for example. |
def generate_configurations(*, guided=False, fresh_start=False, save=False):
"""
If a config file is found in the standard locations, it will be loaded and
the config data would be returned. If not found, then generate the data on
the fly, and return it
"""
if fresh_start:
purge_configs(... | If a config file is found in the standard locations, it will be loaded and
the config data would be returned. If not found, then generate the data on
the fly, and return it |
def remove_prefix(self, args):
""" Remove a prefix.
Valid keys in the `args`-struct:
* `auth` [struct]
Authentication options passed to the :class:`AuthFactory`.
* `prefix` [struct]
Attributes used to select what prefix to remove.
... | Remove a prefix.
Valid keys in the `args`-struct:
* `auth` [struct]
Authentication options passed to the :class:`AuthFactory`.
* `prefix` [struct]
Attributes used to select what prefix to remove.
* `recursive` [boolean]
Wh... |
def extract_features(self, data_frame, pre=''):
"""
This method extracts all the features available to the Tremor Processor class.
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: amplitude_by_fft, frequency_by_fft, amplitude_by_welch... | This method extracts all the features available to the Tremor Processor class.
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: amplitude_by_fft, frequency_by_fft, amplitude_by_welch, frequency_by_fft, bradykinesia_amplitude_by_fft, \
... |
def _dictToAlignments(self, blastDict, read):
"""
Take a dict (made by XMLRecordsReader._convertBlastRecordToDict)
and convert it to a list of alignments.
@param blastDict: A C{dict}, from convertBlastRecordToDict.
@param read: A C{Read} instance, containing the read that BLAST ... | Take a dict (made by XMLRecordsReader._convertBlastRecordToDict)
and convert it to a list of alignments.
@param blastDict: A C{dict}, from convertBlastRecordToDict.
@param read: A C{Read} instance, containing the read that BLAST used
to create this record.
@raise ValueError:... |
def list_available_genomes(provider=None):
"""
List all available genomes.
Parameters
----------
provider : str, optional
List genomes from specific provider. Genomes from all
providers will be returned if not specified.
Returns
-------
list with genome names
"""
... | List all available genomes.
Parameters
----------
provider : str, optional
List genomes from specific provider. Genomes from all
providers will be returned if not specified.
Returns
-------
list with genome names |
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: WorkspaceRealTimeStatisticsContext for this WorkspaceRealTimeStatisticsInstance
:rtype: twilio.re... | Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: WorkspaceRealTimeStatisticsContext for this WorkspaceRealTimeStatisticsInstance
:rtype: twilio.rest.taskrouter.v1.workspace.workspace_r... |
def _internal_kv_get(key):
    """Fetch the value of a binary key from the internal KV store."""
    current_worker = ray.worker.get_global_worker()
    if current_worker.mode != ray.worker.LOCAL_MODE:
        # Non-local modes back the KV store with Redis.
        return current_worker.redis_client.hget(key, "value")
    # Local mode keeps values in an in-process store instead of Redis.
    return _local.get(key)
def from_rdata_list(ttl, rdatas):
"""Create an rdataset with the specified TTL, and with
the specified list of rdata objects.
@rtype: dns.rdataset.Rdataset object
"""
if len(rdatas) == 0:
raise ValueError("rdata list must not be empty")
r = None
for rd in rdatas:
if r is No... | Create an rdataset with the specified TTL, and with
the specified list of rdata objects.
@rtype: dns.rdataset.Rdataset object |
def _get_LMv2_response(user_name, password, domain_name, server_challenge, client_challenge):
"""
[MS-NLMP] v28.0 2016-07-14
2.2.2.4 LMv2_RESPONSE
The LMv2_RESPONSE structure defines the NTLM v2 authentication LmChallengeResponse
in the AUTHENTICATE_MESSAGE. This response is use... | [MS-NLMP] v28.0 2016-07-14
2.2.2.4 LMv2_RESPONSE
The LMv2_RESPONSE structure defines the NTLM v2 authentication LmChallengeResponse
in the AUTHENTICATE_MESSAGE. This response is used only when NTLM v2
authentication is configured.
:param user_name: The user name of the user we ... |
def width(self, value):
    """Set the width.

    The new value is stored only when it differs from the current width
    and is numeric; non-numeric values are silently ignored.

    :param value: the new width (int or float; `long` on Python 2)
    """
    # `long` only exists on Python 2; fall back to (int, float) on
    # Python 3 instead of raising NameError.
    try:
        numeric_types = (int, float, long)  # noqa: F821 - py2 builtin
    except NameError:
        numeric_types = (int, float)
    # Only store a new value when it actually changes and is numeric.
    if self._width != value and isinstance(value, numeric_types):
        self._width = value
def reset_tip_tracking(self):
"""
Resets the :any:`Pipette` tip tracking, "refilling" the tip racks
"""
self.current_tip(None)
self.tip_rack_iter = iter([])
if self.has_tip_rack():
iterables = self.tip_racks
if self.channels > 1:
... | Resets the :any:`Pipette` tip tracking, "refilling" the tip racks |
def extract_fragment(self, iri: str) -> str:
''' Pulls only for code/ID from the iri
I only add the str() conversion for the iri because rdflib objects need to be converted.
'''
fragment = str(iri).rsplit('/')[-1].split(':', 1)[-1].split('#', 1)[-1].split('_', 1)[-1]
return frag... | Pulls only for code/ID from the iri
I only add the str() conversion for the iri because rdflib objects need to be converted. |
def run(cls, command, cwd=".", **kwargs):
"""
Make a subprocess call, collect its output and returncode.
Returns CommandResult instance as ValueObject.
"""
assert isinstance(command, six.string_types)
command_result = CommandResult()
command_result.command = comma... | Make a subprocess call, collect its output and returncode.
Returns CommandResult instance as ValueObject. |
def get_sms_connection(backend=None, fail_silently=False, **kwds):
"""Load an sms backend and return an instance of it.
If backend is None (default) settings.SMS_BACKEND is used.
Both fail_silently and other keyword arguments are used in the
constructor of the backend.
https://github.com/django/django/blob/mast... | Load an sms backend and return an instance of it.
If backend is None (default) settings.SMS_BACKEND is used.
Both fail_silently and other keyword arguments are used in the
constructor of the backend.
https://github.com/django/django/blob/master/django/core/mail/__init__.py#L28 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.