code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def multisplit(s, seps=list(string.punctuation) + list(string.whitespace), blank=True):
r"""Just like str.split(), except that a variety (list) of seperators is allowed.
>>> multisplit(r'1-2?3,;.4+-', string.punctuation)
['1', '2', '3', '', '', '4', '', '']
>>> multisplit(r'1-2?3,;.4+-', string.punctua... | r"""Just like str.split(), except that a variety (list) of seperators is allowed.
>>> multisplit(r'1-2?3,;.4+-', string.punctuation)
['1', '2', '3', '', '', '4', '', '']
>>> multisplit(r'1-2?3,;.4+-', string.punctuation, blank=False)
['1', '2', '3', '4']
>>> multisplit(r'1C 234567890', '\x00\x01\x0... |
def handle_change(self, change):
""" Handle changes from atom ContainerLists """
op = change['operation']
if op in 'append':
self.add(len(change['value']), LatLng(*change['item']))
elif op == 'insert':
self.add(change['index'], LatLng(*change['item']))
eli... | Handle changes from atom ContainerLists |
def verify_space_available(self, search_pattern=r"(\d+) \w+ free"):
"""Verify sufficient space is available on destination file system (return boolean)."""
if self.direction == "put":
space_avail = self.remote_space_available(search_pattern=search_pattern)
elif self.direction == "get... | Verify sufficient space is available on destination file system (return boolean). |
def removeRedundantVerbChains( foundChains, removeOverlapping = True, removeSingleAraAndEi = False ):
''' Eemaldab yleliigsed verbiahelad: ahelad, mis katavad osaliselt v6i t2ielikult
teisi ahelaid (removeOverlapping == True), yhes6nalised 'ei' ja 'ära' ahelad (kui
removeSingleAraAndEi == True)... | Eemaldab yleliigsed verbiahelad: ahelad, mis katavad osaliselt v6i t2ielikult
teisi ahelaid (removeOverlapping == True), yhes6nalised 'ei' ja 'ära' ahelad (kui
removeSingleAraAndEi == True);
Yldiselt on nii, et ylekattuvaid ei tohiks palju olla, kuna fraaside laiendamisel
... |
def find_files(path, exts=None):
"""
查找路径下的文件,返回指定类型的文件列表
:param:
* path: (string) 查找路径
* exts: (list) 文件类型列表,默认为空
:return:
* files_list: (list) 文件列表
举例如下::
print('--- find_files demo ---')
path1 = '/root/fishbase_issue'
all_files = find_files(path... | 查找路径下的文件,返回指定类型的文件列表
:param:
* path: (string) 查找路径
* exts: (list) 文件类型列表,默认为空
:return:
* files_list: (list) 文件列表
举例如下::
print('--- find_files demo ---')
path1 = '/root/fishbase_issue'
all_files = find_files(path1)
print(all_files)
exts_file... |
def offset(self, location, dy=0):
""" Returns a new ``Region`` offset from this one by ``location``
Width and height remain the same
"""
if not isinstance(location, Location):
# Assume variables passed were dx,dy
location = Location(location, dy)
r = Regi... | Returns a new ``Region`` offset from this one by ``location``
Width and height remain the same |
def is_rigid(matrix):
"""
Check to make sure a homogeonous transformation matrix is
a rigid body transform.
Parameters
-----------
matrix: possibly a transformation matrix
Returns
-----------
check: bool, True if matrix is a valid (4,4) rigid body transform.
"""
matrix = n... | Check to make sure a homogeonous transformation matrix is
a rigid body transform.
Parameters
-----------
matrix: possibly a transformation matrix
Returns
-----------
check: bool, True if matrix is a valid (4,4) rigid body transform. |
def URL(base, path, segments=None, defaults=None):
"""
URL segment handler capable of getting and setting segments by name. The
URL is constructed by joining base, path and segments.
For each segment a property capable of getting and setting that segment is
created dynamically.
"""
# Make a... | URL segment handler capable of getting and setting segments by name. The
URL is constructed by joining base, path and segments.
For each segment a property capable of getting and setting that segment is
created dynamically. |
def mutating_method(func):
"""Decorator for methods that are allowed to modify immutable objects"""
def wrapper(self, *__args, **__kwargs):
old_mutable = self._mutable
self._mutable = True
try:
# Call the wrapped function
return func(self, *__args, **__kwargs)
... | Decorator for methods that are allowed to modify immutable objects |
async def _process_request(self, identity: bytes, empty_frame: list, request: RPCRequest):
"""
Executes the method specified in a JSON RPC request and then sends the reply to the socket.
:param identity: Client identity provided by ZeroMQ
:param empty_frame: Either an empty list or a si... | Executes the method specified in a JSON RPC request and then sends the reply to the socket.
:param identity: Client identity provided by ZeroMQ
:param empty_frame: Either an empty list or a single null frame depending on the client type
:param request: JSON RPC request |
def create_comment_edit(self, ):
"""Create a text edit for comments
:returns: the created text edit
:rtype: :class:`jukeboxcore.gui.widgets.textedit.JB_PlainTextEdit`
:raises: None
"""
pte = JB_PlainTextEdit(parent=self)
pte.set_placeholder("Enter a comment befor... | Create a text edit for comments
:returns: the created text edit
:rtype: :class:`jukeboxcore.gui.widgets.textedit.JB_PlainTextEdit`
:raises: None |
def concatenate_lists(*layers, **kwargs): # pragma: no cover
"""Compose two or more models `f`, `g`, etc, such that their outputs are
concatenated, i.e. `concatenate(f, g)(x)` computes `hstack(f(x), g(x))`
"""
if not layers:
return noop()
drop_factor = kwargs.get("drop_factor", 1.0)
ops... | Compose two or more models `f`, `g`, etc, such that their outputs are
concatenated, i.e. `concatenate(f, g)(x)` computes `hstack(f(x), g(x))` |
def command_err(self, code=1, errmsg='MockupDB command failure',
*args, **kwargs):
"""Error reply to a command.
Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
"""
kwargs.setdefault('ok', 0)
kwargs['code'] = code
kwargs['errmsg'... | Error reply to a command.
Returns True so it is suitable as an `~MockupDB.autoresponds` handler. |
def exists_alias(self, alias_name, index_name=None):
"""Check whether or not the given alias exists
:return: True if alias already exist"""
return self._es_conn.indices.exists_alias(index=index_name, name=alias_name) | Check whether or not the given alias exists
:return: True if alias already exist |
def publish(self):
"""Publish GitHub release as record."""
with db.session.begin_nested():
deposit = self.deposit_class.create(self.metadata)
deposit['_deposit']['created_by'] = self.event.user_id
deposit['_deposit']['owners'] = [self.event.user_id]
# Fet... | Publish GitHub release as record. |
def _validate(self, writing=False):
"""Verify that the box obeys the specifications."""
for box in self.DR:
if box.box_id != 'url ':
msg = ('Child boxes of a data reference box can only be data '
'entry URL boxes.')
self._dispatch_valida... | Verify that the box obeys the specifications. |
def p_sequenceItems(self, p):
"""sequenceItems : sequenceItems ',' sequenceItem
| sequenceItem"""
# libsmi: TODO: might this list be emtpy?
n = len(p)
if n == 4:
p[0] = p[1] + [p[3]]
elif n == 2:
p[0] = [p[1]] | sequenceItems : sequenceItems ',' sequenceItem
| sequenceItem |
def wait(self):
'''
Wait until the ui object gone or exist.
Usage:
d(text="Clock").wait.gone() # wait until it's gone.
d(text="Settings").wait.exists() # wait until it appears.
'''
@param_to_property(action=["exists", "gone"])
def _wait(action, timeout=30... | Wait until the ui object gone or exist.
Usage:
d(text="Clock").wait.gone() # wait until it's gone.
d(text="Settings").wait.exists() # wait until it appears. |
def _call_post_with_user_override(self, sap_user_id, url, payload):
"""
Make a post request with an auth token acquired for a specific user to a SuccessFactors endpoint.
Args:
sap_user_id (str): The user to use to retrieve an auth token.
url (str): The url to post to.
... | Make a post request with an auth token acquired for a specific user to a SuccessFactors endpoint.
Args:
sap_user_id (str): The user to use to retrieve an auth token.
url (str): The url to post to.
payload (str): The json encoded payload to post. |
def view(template=None):
"""
Create the Maintenance view
Must be instantiated
import maintenance_view
MaintenanceView = maintenance_view()
:param template_: The directory containing the view pages
:return:
"""
if not template:
template = "Juice/Plugin/MaintenancePage/index.... | Create the Maintenance view
Must be instantiated
import maintenance_view
MaintenanceView = maintenance_view()
:param template_: The directory containing the view pages
:return: |
def Tmatrix(X):
"""
gets the orientation matrix (T) from data in X
"""
T = [[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]]
for row in X:
for k in range(3):
for l in range(3):
T[k][l] += row[k] * row[l]
return T | gets the orientation matrix (T) from data in X |
def password_valid(self, wallet):
"""
Checks whether the password entered for **wallet** is valid
:param wallet: Wallet to check password for
:type wallet: str
:raises: :py:exc:`nano.rpc.RPCException`
>>> rpc.password_valid(
... wallet="000D1BAEC8EC208142C9... | Checks whether the password entered for **wallet** is valid
:param wallet: Wallet to check password for
:type wallet: str
:raises: :py:exc:`nano.rpc.RPCException`
>>> rpc.password_valid(
... wallet="000D1BAEC8EC208142C99059B393051BAC8380F9B5A2E6B2489A277D81789F3F"
... |
def get_learner_data_records(self, enterprise_enrollment, completed_date=None, grade=None, is_passing=False):
"""
Return a SapSuccessFactorsLearnerDataTransmissionAudit with the given enrollment and course completion data.
If completed_date is None and the learner isn't passing, then course com... | Return a SapSuccessFactorsLearnerDataTransmissionAudit with the given enrollment and course completion data.
If completed_date is None and the learner isn't passing, then course completion has not been met.
If no remote ID can be found, return None. |
def _pct_diff(self, best, other):
""" Calculates and colorizes the percent difference between @best
and @other
"""
return colorize("{}%".format(
round(((best-other)/best)*100, 2)).rjust(10), "red") | Calculates and colorizes the percent difference between @best
and @other |
def close(self):
"""Close the stream
"""
self.closed = True
self._flush_bits_to_stream()
self._stream.close() | Close the stream |
def delete_empty_children(self):
"""
Walk through the children of this node and delete any that are empty.
"""
for child in self.children:
child.delete_empty_children()
try:
if os.path.exists(child.full_path):
os.rmdir(child.ful... | Walk through the children of this node and delete any that are empty. |
def _verify_credentials(self):
"""
An internal method that verifies the credentials given at instantiation.
:raises: :class:`Pymoe.errors.UserLoginFailed`
"""
r = requests.get(self.apiurl + "account/verify_credentials.xml",
auth=HTTPBasicAuth(self._usern... | An internal method that verifies the credentials given at instantiation.
:raises: :class:`Pymoe.errors.UserLoginFailed` |
def encrypt(key, message):
'''encrypt leverages KMS encrypt and base64-encode encrypted blob
More info on KMS encrypt API:
https://docs.aws.amazon.com/kms/latest/APIReference/API_encrypt.html
'''
try:
ret = kms.encrypt(KeyId=key, Plaintext=message)
encrypted_data = base64.en... | encrypt leverages KMS encrypt and base64-encode encrypted blob
More info on KMS encrypt API:
https://docs.aws.amazon.com/kms/latest/APIReference/API_encrypt.html |
def max(a, axis=None):
"""
Request the maximum of an Array over any number of axes.
.. note:: Currently limited to operating on a single axis.
Parameters
----------
a : Array object
The object whose maximum is to be found.
axis : None, or int, or iterable of ints
Axis or ax... | Request the maximum of an Array over any number of axes.
.. note:: Currently limited to operating on a single axis.
Parameters
----------
a : Array object
The object whose maximum is to be found.
axis : None, or int, or iterable of ints
Axis or axes along which the operation is per... |
def str2rsi(key):
"""
Convert a string of the form 'rlz-XXXX/sid-YYYY/ZZZ'
into a triple (XXXX, YYYY, ZZZ)
"""
rlzi, sid, imt = key.split('/')
return int(rlzi[4:]), int(sid[4:]), imt | Convert a string of the form 'rlz-XXXX/sid-YYYY/ZZZ'
into a triple (XXXX, YYYY, ZZZ) |
def _onDisconnect(self, mqttc, obj, rc):
"""
Called when the client disconnects from IBM Watson IoT Platform.
See [paho.mqtt.python#on_disconnect](https://github.com/eclipse/paho.mqtt.python#on_disconnect) for more information
# Parameters
mqttc (paho.mqtt.clien... | Called when the client disconnects from IBM Watson IoT Platform.
See [paho.mqtt.python#on_disconnect](https://github.com/eclipse/paho.mqtt.python#on_disconnect) for more information
# Parameters
mqttc (paho.mqtt.client.Client): The client instance for this callback
obj ... |
def get_zipped_dataset_from_predictions(predictions):
"""Creates dataset from in-memory predictions."""
targets = stack_data_given_key(predictions, "targets")
outputs = stack_data_given_key(predictions, "outputs")
num_videos, num_steps = targets.shape[:2]
# Truncate output time-steps to match target time-ste... | Creates dataset from in-memory predictions. |
def put(self, path, data, **options):
"""
Parses PUT request options and dispatches a request
"""
data, options = self._update_request(data, options)
return self.request('put', path, data=data, **options) | Parses PUT request options and dispatches a request |
def load(self, env=None):
""" Load a section values of given environment.
If nothing to specified, use environmental variable.
If unknown environment was specified, warn it on logger.
:param env: environment key to load in a coercive manner
:type env: string
:rtype: dict... | Load a section values of given environment.
If nothing to specified, use environmental variable.
If unknown environment was specified, warn it on logger.
:param env: environment key to load in a coercive manner
:type env: string
:rtype: dict |
def _ParseMFTEntry(self, parser_mediator, mft_entry):
"""Extracts data from a NFTS $MFT entry.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
mft_entry (pyfsntfs.file_entry): MFT entry.
"""
for attribu... | Extracts data from a NFTS $MFT entry.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
mft_entry (pyfsntfs.file_entry): MFT entry. |
def milestone(self, number):
"""Get the milestone indicated by ``number``.
:param int number: (required), unique id number of the milestone
:returns: :class:`Milestone <github3.issues.milestone.Milestone>`
"""
json = None
if int(number) > 0:
url = self._build... | Get the milestone indicated by ``number``.
:param int number: (required), unique id number of the milestone
:returns: :class:`Milestone <github3.issues.milestone.Milestone>` |
def fixed_vectors_encoding(index_encoded_sequences, letter_to_vector_df):
"""
Given a `n` x `k` matrix of integers such as that returned by `index_encoding()` and
a dataframe mapping each index to an arbitrary vector, return a `n * k * m`
array where the (`i`, `j`)'th element is `letter_to_vector_df.ilo... | Given a `n` x `k` matrix of integers such as that returned by `index_encoding()` and
a dataframe mapping each index to an arbitrary vector, return a `n * k * m`
array where the (`i`, `j`)'th element is `letter_to_vector_df.iloc[sequence[i][j]]`.
The dataframe index and columns names are ignored here; the i... |
def _get(self, *args, **kwargs):
"""
Retrieve unread messages for current user, both from the inbox and
from other storages
"""
messages, all_retrieved = super(StorageMixin, self)._get(*args, **kwargs)
if self.user.is_authenticated():
inbox_messages = self.bac... | Retrieve unread messages for current user, both from the inbox and
from other storages |
def init_providers(self, provider, kwargs):
"""
Inits main and fallback provider if relevant
:param provider: Provider name to use
:param kwargs: Additional kwargs
:raises ValueError: If provider name or fallback names are not valid providers, a :exc:`ValueError` will
b... | Inits main and fallback provider if relevant
:param provider: Provider name to use
:param kwargs: Additional kwargs
:raises ValueError: If provider name or fallback names are not valid providers, a :exc:`ValueError` will
be raised |
def window_nuttall(N):
r"""Nuttall tapering window
:param N: window length
.. math:: w(n) = a_0 - a_1 \cos\left(\frac{2\pi n}{N-1}\right)+ a_2 \cos\left(\frac{4\pi n}{N-1}\right)- a_3 \cos\left(\frac{6\pi n}{N-1}\right)
with :math:`a_0 = 0.355768`, :math:`a_1 = 0.487396`, :math:`a_2=0.144232` and :ma... | r"""Nuttall tapering window
:param N: window length
.. math:: w(n) = a_0 - a_1 \cos\left(\frac{2\pi n}{N-1}\right)+ a_2 \cos\left(\frac{4\pi n}{N-1}\right)- a_3 \cos\left(\frac{6\pi n}{N-1}\right)
with :math:`a_0 = 0.355768`, :math:`a_1 = 0.487396`, :math:`a_2=0.144232` and :math:`a_3=0.012604`
.. p... |
def save(self, *args, **kwargs):
"""
**uid**: :code:`{person.uid}_candidate:{party.uid}-{cycle.ap_code}`
"""
self.uid = "{}_candidate:{}-{}".format(
self.person.uid, self.party.uid, self.race.cycle.uid
)
super(Candidate, self).save(*args, **kwargs) | **uid**: :code:`{person.uid}_candidate:{party.uid}-{cycle.ap_code}` |
def archive_handler(unused_build_context, target, fetch, package_dir, tar):
"""Handle remote downloadable archive URI.
Download the archive and cache it under the private builer workspace
(unless already downloaded), extract it, and add the content to the
package tar.
TODO(itamar): Support re-down... | Handle remote downloadable archive URI.
Download the archive and cache it under the private builer workspace
(unless already downloaded), extract it, and add the content to the
package tar.
TODO(itamar): Support re-downloading if remote changed compared to local.
TODO(itamar): Support more archive... |
def fopen(*args, **kwargs):
'''
Wrapper around open() built-in to set CLOEXEC on the fd.
This flag specifies that the file descriptor should be closed when an exec
function is invoked;
When a file descriptor is allocated (as with open or dup), this bit is
initially cleared on the new file desc... | Wrapper around open() built-in to set CLOEXEC on the fd.
This flag specifies that the file descriptor should be closed when an exec
function is invoked;
When a file descriptor is allocated (as with open or dup), this bit is
initially cleared on the new file descriptor, meaning that descriptor will
... |
def on_release_key(key, callback, suppress=False):
"""
Invokes `callback` for KEY_UP event related to the given key. For details see `hook`.
"""
return hook_key(key, lambda e: e.event_type == KEY_DOWN or callback(e), suppress=suppress) | Invokes `callback` for KEY_UP event related to the given key. For details see `hook`. |
def _get_drift(step_size_parts, volatility_parts, grads_volatility,
grads_target_log_prob,
name=None):
"""Compute diffusion drift at the current location `current_state`.
The drift of the diffusion at is computed as
```none
0.5 * `step_size` * volatility_parts * `target_log_prob_... | Compute diffusion drift at the current location `current_state`.
The drift of the diffusion at is computed as
```none
0.5 * `step_size` * volatility_parts * `target_log_prob_fn(current_state)`
+ `step_size` * `grads_volatility`
```
where `volatility_parts` = `volatility_fn(current_state)**2` and
`grads... |
def kappa_statistic(self):
r"""Return κ statistic.
The κ statistic is defined as:
:math:`\kappa = \frac{accuracy - random~ accuracy}
{1 - random~ accuracy}`
The κ statistic compares the performance of the classifier relative to
the performance of a random classifier. :m... | r"""Return κ statistic.
The κ statistic is defined as:
:math:`\kappa = \frac{accuracy - random~ accuracy}
{1 - random~ accuracy}`
The κ statistic compares the performance of the classifier relative to
the performance of a random classifier. :math:`\kappa` = 0 indicates
... |
def run_in_order(l, show_output=True, show_err=True, ignore_err=False,
args=(), **kwargs):
'''
Processes each element of l in order:
if it is a string: execute it as a shell command
elif it is a callable, call it with *args, **kwargs
l-->list: Each elem is either a string (... | Processes each element of l in order:
if it is a string: execute it as a shell command
elif it is a callable, call it with *args, **kwargs
l-->list: Each elem is either a string (shell command) or callable
Any other type is ignored
show_output-->boolean: Show stdout of shell comma... |
def _maybe_unique_host(onion):
"""
:param onion: IAuthenticatedOnionClients provider
:returns: a .onion hostname if all clients have the same name or
raises ValueError otherwise
"""
hosts = [
onion.get_client(nm).hostname
for nm in onion.client_names()
]
if not hosts... | :param onion: IAuthenticatedOnionClients provider
:returns: a .onion hostname if all clients have the same name or
raises ValueError otherwise |
def _update_doc_in_index(self, index_writer, doc):
"""
Add/Update a document in the index
"""
all_labels = set(self.label_list)
doc_labels = set(doc.labels)
new_labels = doc_labels.difference(all_labels)
# can happen when we recreate the index from scratch
... | Add/Update a document in the index |
def _create_breadcrumbs(self, relpath):
"""Create filesystem browsing breadcrumb navigation.
That is, make each path segment into a clickable element that takes you to that dir.
"""
if relpath == '.':
breadcrumbs = []
else:
path_parts = [os.path.basename(self._root)] + relpath.split(os.... | Create filesystem browsing breadcrumb navigation.
That is, make each path segment into a clickable element that takes you to that dir. |
def process(self, salt_data, token, opts):
'''
Process events and publish data
'''
parts = salt_data['tag'].split('/')
if len(parts) < 2:
return
# TBD: Simplify these conditional expressions
if parts[1] == 'job':
if parts[3] == 'new':
... | Process events and publish data |
def sec_overview(self):
"""
Generate the data for the Overview section in the report
:return:
"""
""" Data sources overview: table with metric summaries"""
metrics = self.config['overview']['activity_metrics']
file_name = self.config['overview']['activity_file_cs... | Generate the data for the Overview section in the report
:return: |
def path_regex(self):
"""Return the regex for the path to the build folder."""
regex = r'releases/%(VERSION)s/%(PLATFORM)s/%(LOCALE)s/'
return regex % {'LOCALE': self.locale,
'PLATFORM': self.platform_regex,
'VERSION': self.version} | Return the regex for the path to the build folder. |
def is_invalid_params(func, *args, **kwargs):
""" Check, whether function 'func' accepts parameters 'args', 'kwargs'.
NOTE: Method is called after funct(*args, **kwargs) generated TypeError,
it is aimed to destinguish TypeError because of invalid parameters from
TypeError from inside the function.
... | Check, whether function 'func' accepts parameters 'args', 'kwargs'.
NOTE: Method is called after funct(*args, **kwargs) generated TypeError,
it is aimed to destinguish TypeError because of invalid parameters from
TypeError from inside the function.
.. versionadded: 1.9.0 |
def submit_mult_calcs(calc_suite_specs, exec_options=None):
"""Generate and execute all specified computations.
Once the calculations are prepped and submitted for execution, any
calculation that triggers any exception or error is skipped, and the rest
of the calculations proceed unaffected. This prev... | Generate and execute all specified computations.
Once the calculations are prepped and submitted for execution, any
calculation that triggers any exception or error is skipped, and the rest
of the calculations proceed unaffected. This prevents an error in a single
calculation from crashing a large sui... |
def dataset_to_stream(dataset, input_name, num_chunks=0, append_targets=False):
"""Takes a tf.Dataset and creates a numpy stream of ready batches."""
for example in tfds.as_numpy(dataset):
inp, out = example[0][input_name], example[1]
if len(out.shape) > 1 and out.shape[-1] == 1:
out = np.squeeze(out,... | Takes a tf.Dataset and creates a numpy stream of ready batches. |
def dump_BSE_data_in_GW_run(self, BSE_dump=True):
"""
:param BSE_dump: boolean
:return: set the "do_bse" variable to one in cell.in
"""
if BSE_dump:
self.BSE_TDDFT_options.update(do_bse=1, do_tddft=0)
else:
self.BSE_TDDFT_options.update(do_bse=0, ... | :param BSE_dump: boolean
:return: set the "do_bse" variable to one in cell.in |
def rebuild(self):
"""
Rebuilds the scene based on the current settings.
:param start | <QDate>
end | <QDate>
"""
gantt = self.ganttWidget()
scale = gantt.timescale()
rect = self.sceneRect()
header = gantt.tree... | Rebuilds the scene based on the current settings.
:param start | <QDate>
end | <QDate> |
def main(graph):
"""
Create and drop databases.
"""
args = parse_args(graph)
if args.drop:
drop_all(graph)
create_all(graph) | Create and drop databases. |
def add_node(self, op_type, inputs, outputs, op_domain='', op_version=1, **attrs):
'''
Add a NodeProto into the node list of the final ONNX model. If the input operator's domain-version information
cannot be found in our domain-version pool (a Python set), we may add it.
:param op_type:... | Add a NodeProto into the node list of the final ONNX model. If the input operator's domain-version information
cannot be found in our domain-version pool (a Python set), we may add it.
:param op_type: A string (e.g., Pool and Conv) indicating the type of the NodeProto
:param inputs: A list of s... |
def validate(self, value):
"""Make sure that the inspected value is of type `list` or `tuple` """
if not isinstance(value, (list, tuple)) or isinstance(value, str_types):
self.raise_error('Only lists and tuples may be used in the ListField vs provided {0}'
.forma... | Make sure that the inspected value is of type `list` or `tuple` |
def prepare_native_return_state(native_state):
"""
Hook target for native function call returns.
Recovers and stores the return value from native memory and toggles the
state, s.t. execution continues in the Soot engine.
"""
javavm_simos = native_state.project.simos
... | Hook target for native function call returns.
Recovers and stores the return value from native memory and toggles the
state, s.t. execution continues in the Soot engine. |
def vector_similarity(self, vector, items):
"""Compute the similarity between a vector and a set of items."""
vector = self.normalize(vector)
items_vec = np.stack([self.norm_vectors[self.items[x]] for x in items])
return vector.dot(items_vec.T) | Compute the similarity between a vector and a set of items. |
def get_room(self, id):
""" Get room.
Returns:
:class:`Room`. Room
"""
if id not in self._rooms:
self._rooms[id] = Room(self, id)
return self._rooms[id] | Get room.
Returns:
:class:`Room`. Room |
def _delete_ubridge_connection(self, adapter_number):
"""
Deletes a connection in uBridge.
:param adapter_number: adapter number
"""
vnet = "ethernet{}.vnet".format(adapter_number)
if vnet not in self._vmx_pairs:
raise VMwareError("vnet {} not in VMX file".f... | Deletes a connection in uBridge.
:param adapter_number: adapter number |
def delete(self, deleteSubtasks=False):
"""Delete this issue from the server.
:param deleteSubtasks: if the issue has subtasks, this argument must be set to true for the call to succeed.
:type deleteSubtasks: bool
"""
super(Issue, self).delete(params={'deleteSubtasks': deleteSu... | Delete this issue from the server.
:param deleteSubtasks: if the issue has subtasks, this argument must be set to true for the call to succeed.
:type deleteSubtasks: bool |
def get_logger(name, level=0):
"""Setup a logging instance"""
level = 0 if not isinstance(level, int) else level
level = 0 if level < 0 else level
level = 4 if level > 4 else level
console = logging.StreamHandler()
level = [logging.NOTSET, logging.ERROR, logging.WARN, log... | Setup a logging instance |
def fit(self, X, y=None, **fit_params):
"""
Fit Unary Math Operator
:param y:
:return:
"""
if self.transform_type not in self.valid_transforms:
warnings.warn("Invalid transform type.", stacklevel=2)
return self | Fit Unary Math Operator
:param y:
:return: |
def export(self, path, column_names=None, byteorder="=", shuffle=False, selection=False, progress=None, virtual=False, sort=None, ascending=True):
"""Exports the DataFrame to a file written with arrow
:param DataFrameLocal df: DataFrame to export
:param str path: path for file
:param li... | Exports the DataFrame to a file written with arrow
:param DataFrameLocal df: DataFrame to export
:param str path: path for file
:param lis[str] column_names: list of column names to export or None for all columns
:param str byteorder: = for native, < for little endian and > for big endi... |
def get_orthology_matrix(self, outfile, sc, outdir=None,
pid_cutoff=None, bitscore_cutoff=None, evalue_cutoff=None,
force_rerun=False):
"""Create the orthology matrix by finding best bidirectional BLAST hits. Genes = rows, strains = columns
Runs... | Create the orthology matrix by finding best bidirectional BLAST hits. Genes = rows, strains = columns
Runs run_makeblastdb, run_bidirectional_blast, and calculate_bbh for protein sequences.
Args:
outfile (str): Filename with extension of the orthology matrix (ie. df_orthology.csv)
... |
def _get_or_generate_vocab(tmp_dir, vocab_filename, vocab_size):
"""Read or create vocabulary."""
vocab_filepath = os.path.join(tmp_dir, vocab_filename)
print('Vocab file written to: ' + vocab_filepath)
if tf.gfile.Exists(vocab_filepath):
gs = text_encoder.SubwordTextEncoder(vocab_filepath)
return gs
... | Read or create vocabulary. |
def get(self, name):
"""Returns the specified interfaces STP configuration resource
The STP interface resource contains the following
* name (str): The interface name
* portfast (bool): The spanning-tree portfast admin state
* bpduguard (bool): The spanning-tree bpd... | Returns the specified interfaces STP configuration resource
The STP interface resource contains the following
* name (str): The interface name
* portfast (bool): The spanning-tree portfast admin state
* bpduguard (bool): The spanning-tree bpduguard admin state
*... |
def delete_course(self, courseid):
"""
:param courseid: the course id of the course
:raise InvalidNameException or CourseNotFoundException
Erase the content of the course folder
"""
if not id_checker(courseid):
raise InvalidNameException("Course with invalid n... | :param courseid: the course id of the course
:raise InvalidNameException or CourseNotFoundException
Erase the content of the course folder |
def main(args=sys.argv):
"""Run the work() method from the class instance in the file "job-instance.pickle".
"""
try:
# Set up logging.
logging.basicConfig(level=logging.WARN)
work_dir = args[1]
assert os.path.exists(work_dir), "First argument to lsf_runner.py must be a direc... | Run the work() method from the class instance in the file "job-instance.pickle". |
def fail(self, message, view, type_error=False):
"""Raise an exception indicating that a value cannot be
accepted.
`type_error` indicates whether the error is due to a type
mismatch rather than a malformed value. In this case, a more
specific exception is raised.
"""
... | Raise an exception indicating that a value cannot be
accepted.
`type_error` indicates whether the error is due to a type
mismatch rather than a malformed value. In this case, a more
specific exception is raised. |
def visit_Call(self, node):
"""Call visitor - used for finding setup() call."""
self.generic_visit(node)
# Setup() is a keywords-only function.
if node.args:
return
keywords = set()
for k in node.keywords:
if k.arg is not None:
ke... | Call visitor - used for finding setup() call. |
def process(specs):
"""
Executes the passed in list of specs
"""
pout, pin = chain_specs(specs)
LOG.info("Processing")
sw = StopWatch().start()
r = pout.process(pin)
if r:
print(r)
LOG.info("Finished in %s", sw.read()) | Executes the passed in list of specs |
def get_lb_conn(dd_driver=None):
'''
Return a load-balancer conn object
'''
vm_ = get_configured_provider()
region = config.get_cloud_config_value(
'region', vm_, __opts__
)
user_id = config.get_cloud_config_value(
'user_id', vm_, __opts__
)
key = config.get_cloud_co... | Return a load-balancer conn object |
def fix_virtualenv_tkinter():
"""
work-a-round for tkinter under windows in a virtualenv:
"TclError: Can't find a usable init.tcl..."
Known bug, see: https://github.com/pypa/virtualenv/issues/93
There are "fix tk" file here:
C:\Python27\Lib\lib-tk\FixTk.py
C:\Python34\Lib\tki... | work-a-round for tkinter under windows in a virtualenv:
"TclError: Can't find a usable init.tcl..."
Known bug, see: https://github.com/pypa/virtualenv/issues/93
There are "fix tk" file here:
C:\Python27\Lib\lib-tk\FixTk.py
C:\Python34\Lib\tkinter\_fix.py
These modules will be au... |
def jsondeclarations(self):
"""Return all declarations in a form ready to be serialised to JSON.
Returns:
list of dict
"""
l = []
for annotationtype, set in self.annotations:
label = None
#Find the 'label' for the declarations dynamically (aka... | Return all declarations in a form ready to be serialised to JSON.
Returns:
list of dict |
def put_metadata(self, key, value, namespace='default'):
"""
Add metadata to segment or subsegment. Metadata is not indexed
but can be later retrieved by BatchGetTraces API.
:param str namespace: optional. Default namespace is `default`.
It must be a string and prefix `AWS.`... | Add metadata to segment or subsegment. Metadata is not indexed
but can be later retrieved by BatchGetTraces API.
:param str namespace: optional. Default namespace is `default`.
It must be a string and prefix `AWS.` is reserved.
:param str key: metadata key under specified namespace
... |
def show(cls, result):
"""
:param TryHaskell.Result result: Parse result of JSON data.
:rtype: str|unicode
"""
if result.ok:
if result.stdout:
out = '\n'.join(result.stdout)
if result.value and result.value != '()':
... | :param TryHaskell.Result result: Parse result of JSON data.
:rtype: str|unicode |
def get_objects(self, path, marker=None,
limit=settings.CLOUD_BROWSER_DEFAULT_LIST_LIMIT):
"""Get objects.
Certain upload clients may add a 0-byte object (e.g., ``FOLDER`` object
for path ``path/to/FOLDER`` - ``path/to/FOLDER/FOLDER``). We add an
extra +1 limit query... | Get objects.
Certain upload clients may add a 0-byte object (e.g., ``FOLDER`` object
for path ``path/to/FOLDER`` - ``path/to/FOLDER/FOLDER``). We add an
extra +1 limit query and ignore any such file objects. |
def _set_scores(self):
"""
Compute anomaly scores for the time series.
"""
anom_scores = {}
self._compute_derivatives()
derivatives_ema = utils.compute_ema(self.smoothing_factor, self.derivatives)
for i, (timestamp, value) in enumerate(self.time_series_items):
anom_scores[timestamp] = ... | Compute anomaly scores for the time series. |
def Orth(docs, drop=0.0):
    """Collect the ``orth`` (word-form) id of every token across *docs*.

    Args:
        docs: iterable of documents, each an iterable of tokens exposing
            an ``orth`` attribute; each doc must also support ``len()``.
        drop: accepted for API compatibility; not used here.

    Returns:
        tuple: a 1-D int32 numpy array of orth ids in order, and ``None``.
    """
    total = sum(len(doc) for doc in docs)
    ids = numpy.zeros((total,), dtype="i")
    pos = 0
    for doc in docs:
        for token in doc:
            ids[pos] = token.orth
            pos += 1
    return ids, None
def onpick(self, event):
"""Called per artist (group), with possibly a list of indices."""
if hasattr(event.artist, '_mt_legend_item'):
# legend item, instead of data point
idx = event.artist._mt_legend_item
try:
self.toggle_artist(self.artists[idx])
... | Called per artist (group), with possibly a list of indices. |
def addproperties(
names,
bfget=None, afget=None, enableget=True,
bfset=None, afset=None, enableset=True,
bfdel=None, afdel=None, enabledel=True
):
"""Decorator in charge of adding python properties to cls.
{a/b}fget, {a/b}fset and {a/b}fdel are applied to all properties matchin... | Decorator in charge of adding python properties to cls.
{a/b}fget, {a/b}fset and {a/b}fdel are applied to all properties matching
names in taking care to not forget default/existing properties. The
prefixes *a* and *b* are respectively for after and before default/existing
property getter/setter/delete... |
def cycle_running_window(iterable, size):
"""Generate n-size cycle running window.
Example::
>>> for i in running_windows([1, 2, 3, 4, 5], size=3):
... print(i)
[1, 2, 3]
[2, 3, 4]
[3, 4, 5]
[4, 5, 1]
[5, 1, 2]
**中文文档**
循环位移滑窗函数。
"""
... | Generate n-size cycle running window.
Example::
>>> for i in running_windows([1, 2, 3, 4, 5], size=3):
... print(i)
[1, 2, 3]
[2, 3, 4]
[3, 4, 5]
[4, 5, 1]
[5, 1, 2]
**中文文档**
循环位移滑窗函数。 |
def alphanum_variable(min_size, max_size, name=None):
"""
Creates the grammar for an alphanumeric code where the size ranges between
two values.
:param min_size: minimum size
:param max_size: maximum size
:param name: name for the field
:return: grammar for an alphanumeric field of a variab... | Creates the grammar for an alphanumeric code where the size ranges between
two values.
:param min_size: minimum size
:param max_size: maximum size
:param name: name for the field
:return: grammar for an alphanumeric field of a variable size |
def atlas_get_all_neighbors( peer_table=None ):
"""
Get *all* neighbor information.
USED ONLY FOR TESTING
"""
if os.environ.get("BLOCKSTACK_ATLAS_NETWORK_SIMULATION") != "1":
raise Exception("This method is only available when testing with the Atlas network simulator")
ret = {}
wit... | Get *all* neighbor information.
USED ONLY FOR TESTING |
def _init_connection(self):
"""
Requests the session ids of the bridge.
:returns: True, if initialization was successful. False, otherwise.
"""
try:
# We are changing self.is_ready: lock it up!
self._lock.acquire()
response = bytearray(22)
... | Requests the session ids of the bridge.
:returns: True, if initialization was successful. False, otherwise. |
def agitator_time_homogeneous(N, P, T, H, mu, rho, D=None, homogeneity=.95):
r'''Calculates time for a fluid mizing in a tank with an impeller to
reach a specified level of homogeneity, according to [1]_.
.. math::
N_p = \frac{Pg}{\rho N^3 D^5}
.. math::
Re_{imp} = \frac{\rho D^2 N}{\m... | r'''Calculates time for a fluid mizing in a tank with an impeller to
reach a specified level of homogeneity, according to [1]_.
.. math::
N_p = \frac{Pg}{\rho N^3 D^5}
.. math::
Re_{imp} = \frac{\rho D^2 N}{\mu}
.. math::
\text{constant} = N_p^{1/3} Re_{imp}
.. math::
... |
def highlight_options(self, **kwargs):
    """Return a clone of this request with updated global highlight options.

    The request itself is left untouched; a clone is produced and its
    highlight options are merged with *kwargs*. For example::

        s = Search()
        s = s.highlight_options(order='score')
    """
    clone = self._clone()
    clone._highlight_opts.update(kwargs)
    return clone
example::
s = Search()
s = s.highlight_options(order='score') |
def setup_splittable_dax_generated(workflow, input_tables, out_dir, tags):
'''
Function for setting up the splitting jobs as part of the workflow.
Parameters
-----------
workflow : pycbc.workflow.core.Workflow
The Workflow instance that the jobs will be added to.
input_tables : pycbc.wo... | Function for setting up the splitting jobs as part of the workflow.
Parameters
-----------
workflow : pycbc.workflow.core.Workflow
The Workflow instance that the jobs will be added to.
input_tables : pycbc.workflow.core.FileList
The input files to be split up.
out_dir : path
... |
def canonicalize_observed_time_series_with_mask(
maybe_masked_observed_time_series):
"""Extract a Tensor with canonical shape and optional mask.
Args:
maybe_masked_observed_time_series: a `Tensor`-like object with shape
`[..., num_timesteps]` or `[..., num_timesteps, 1]`, or a
`tfp.sts.MaskedTi... | Extract a Tensor with canonical shape and optional mask.
Args:
maybe_masked_observed_time_series: a `Tensor`-like object with shape
`[..., num_timesteps]` or `[..., num_timesteps, 1]`, or a
`tfp.sts.MaskedTimeSeries` containing such an object.
Returns:
masked_time_series: a `tfp.sts.MaskedTimeS... |
def _isophote_list_to_table(isophote_list):
"""
Convert an `~photutils.isophote.IsophoteList` instance to
a `~astropy.table.QTable`.
Parameters
----------
isophote_list : list of `~photutils.isophote.Isophote` or a `~photutils.isophote.IsophoteList` instance
A list of isophotes.
Re... | Convert an `~photutils.isophote.IsophoteList` instance to
a `~astropy.table.QTable`.
Parameters
----------
isophote_list : list of `~photutils.isophote.Isophote` or a `~photutils.isophote.IsophoteList` instance
A list of isophotes.
Returns
-------
result : `~astropy.table.QTable`
... |
def most_hot(self):
"""
Returns the *Weather* object in the forecast having the highest max
temperature. The temperature is retrieved using the
``get_temperature['temp_max']`` call; was 'temp_max' key missing for
every *Weather* instance in the forecast, ``None`` would be returne... | Returns the *Weather* object in the forecast having the highest max
temperature. The temperature is retrieved using the
``get_temperature['temp_max']`` call; was 'temp_max' key missing for
every *Weather* instance in the forecast, ``None`` would be returned.
:returns: a *Weather* object... |
def fit_size_models(self, model_names,
model_objs,
input_columns,
output_column="Hail_Size",
output_start=5,
output_step=5,
output_stop=100):
"""
Fit size model... | Fit size models to produce discrete pdfs of forecast hail sizes.
Args:
model_names: List of model names
model_objs: List of model objects
input_columns: List of input variables
output_column: Output variable name
output_start: Hail size bin start
... |
def linear(m=1, b=0):
''' Return a driver function that can advance a sequence of linear values.
.. code-block:: none
value = m * i + b
Args:
m (float) : a slope for the linear driver
x (float) : an offset for the linear driver
'''
def f(i):
return m * i + b
r... | Return a driver function that can advance a sequence of linear values.
.. code-block:: none
value = m * i + b
Args:
m (float) : a slope for the linear driver
x (float) : an offset for the linear driver |
def setup_argparse():
"""
Setup the argparse argument parser
:return: instance of argparse
:rtype: ArgumentParser
"""
parser = argparse.ArgumentParser(
description='Convert old ini-style GNS3 topologies (<=0.8.7) to '
'the newer version 1+ JSON format')
parser.ad... | Setup the argparse argument parser
:return: instance of argparse
:rtype: ArgumentParser |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.