| code (string, lengths 75–104k) | docstring (string, lengths 1–46.9k) |
|---|---|
def _position_encoding_init(max_length, dim):
"""Init the sinusoid position encoding table """
position_enc = np.arange(max_length).reshape((-1, 1)) \
/ (np.power(10000, (2. / dim) * np.arange(dim).reshape((1, -1))))
# Apply the cosine to even columns and sin to odds.
position_enc[:, ... | Init the sinusoid position encoding table |
def get_nift_values() -> Mapping[str, str]:
    """Map each lowercased NIFT name to its canonical (original-case) form."""
    resource = get_bel_resource(NIFT)
    return {entry.lower(): entry for entry in resource['Values']}
# | Extract the list of NIFT names from the BEL resource and builds a dictionary mapping from the lowercased version
to the uppercase version. |
def redo(self, channel, image):
"""Add an entry with image modification info."""
chname = channel.name
if image is None:
# shouldn't happen, but let's play it safe
return
imname = image.get('name', 'none')
iminfo = channel.get_image_info(imname)
t... | Add an entry with image modification info. |
def attr_string(filterKeys=(), filterValues=(), **kwargs):
"""Build a string consisting of 'key=value' substrings for each keyword
argument in :kwargs:
@param filterKeys: list of key names to ignore
@param filterValues: list of values to ignore (e.g. None will ignore all
key=va... | Build a string consisting of 'key=value' substrings for each keyword
argument in :kwargs:
@param filterKeys: list of key names to ignore
@param filterValues: list of values to ignore (e.g. None will ignore all
key=value pairs that has that value. |
def purge_tokens(self, input_token_attrs=None):
""" Removes all specified token_attrs that exist in instance.token_attrs
:param token_attrs: list(str), list of string values of tokens to remove. If None, removes all
"""
if input_token_attrs is None:
remove_attrs = s... | Removes all specified token_attrs that exist in instance.token_attrs
:param token_attrs: list(str), list of string values of tokens to remove. If None, removes all |
def fetch_and_index(self, fetch_func):
    """Fetch rows via *fetch_func* and yield one dict of rows keyed by their 'id' field."""
    rows, err = fetch_func()
    if err:
        raise err
    indexed = {}
    for row in rows:
        indexed[row['id']] = row
    yield indexed
# | Fetch data with func, return dict indexed by ID |
def search_all(self, quota=50, format='json'):
'''
Returns a single list containing up to 'limit' Result objects
Will keep requesting until quota is met
Will also truncate extra results to return exactly the given quota
'''
quota_left = quota
results = []
... | Returns a single list containing up to 'limit' Result objects
Will keep requesting until quota is met
Will also truncate extra results to return exactly the given quota |
def keyPressEvent(self, event):
"""
Listen for the delete key and check to see if this should auto
set the remove property on the object.
:param event | <QKeyPressEvent>
"""
# tag the item for deletion
if self.useDefaultKeystrokes() and self.... | Listen for the delete key and check to see if this should auto
set the remove property on the object.
:param event | <QKeyPressEvent> |
def _get_log_entries(self) -> List[Tuple[int, bytes, List[int], bytes]]:
"""
Return the log entries for this computation and its children.
They are sorted in the same order they were emitted during the transaction processing, and
include the sequential counter as the first element of th... | Return the log entries for this computation and its children.
They are sorted in the same order they were emitted during the transaction processing, and
include the sequential counter as the first element of the tuple representing every entry. |
def process_module(self, node):
"""Process the astroid node stream."""
if self.config.file_header:
if sys.version_info[0] < 3:
pattern = re.compile(
'\A' + self.config.file_header, re.LOCALE | re.MULTILINE)
else:
# The use of re... | Process the astroid node stream. |
def validate_cmap(val):
"""Validate a colormap
Parameters
----------
val: str or :class:`mpl.colors.Colormap`
Returns
-------
str or :class:`mpl.colors.Colormap`
Raises
------
ValueError"""
from matplotlib.colors import Colormap
try:
return validate_str(val)
... | Validate a colormap
Parameters
----------
val: str or :class:`mpl.colors.Colormap`
Returns
-------
str or :class:`mpl.colors.Colormap`
Raises
------
ValueError |
def f(x, depth1, depth2, dim='2d', first_batch_norm=True, stride=1,
training=True, bottleneck=True, padding='SAME'):
"""Applies residual function for RevNet.
Args:
x: input tensor
depth1: Number of output channels for the first and second conv layers.
depth2: Number of output channels for the thi... | Applies residual function for RevNet.
Args:
x: input tensor
depth1: Number of output channels for the first and second conv layers.
depth2: Number of output channels for the third conv layer.
dim: '2d' if 2-dimensional, '3d' if 3-dimensional.
first_batch_norm: Whether to keep the first batch norm... |
def build(image, build_path, tag=None, build_args=None, fromline=None, args=[]):
""" build a docker image"""
if tag:
image = ":".join([image, tag])
bdir = tempfile.mkdtemp()
os.system('cp -r {0:s}/* {1:s}'.format(build_path, bdir))
if build_args:
stdw = tempfile.NamedTemporaryFile(... | build a docker image |
def get_initial_arguments(request, cache_id=None):
    """Extract the initial arguments for the dash app, or None when no cache id is given."""
    if cache_id is None:
        return None
    # The configured location decides whether the args live in the cache or the session.
    if not initial_argument_location():
        return request.session[cache_id]
    return cache.get(cache_id)
# | Extract initial arguments for the dash app |
def get_coord_box(centre_x, centre_y, distance):
"""Get the square boundary coordinates for a given centre and distance"""
"""Todo: return coordinates inside a circle, rather than a square"""
return {
'top_left': (centre_x - distance, centre_y + distance),
'top_right': (centre_x + distance, ... | Get the square boundary coordinates for a given centre and distance |
def resolve_dependencies(self):
""" evaluate each of the data dependencies of this build target,
returns the resulting dict"""
return dict(
[((key, self.data_dependencies[key])
if type(self.data_dependencies[key]) != DeferredDependency
else (key, s... | evaluate each of the data dependencies of this build target,
returns the resulting dict |
def plot_color_legend(legend, horizontal=False, ax=None):
"""
Plot a pandas Series with labels and colors.
Parameters
----------
legend : pandas.Series
Pandas Series whose values are RGB triples and whose index contains
categorical labels.
horizontal : bool
If True, plo... | Plot a pandas Series with labels and colors.
Parameters
----------
legend : pandas.Series
Pandas Series whose values are RGB triples and whose index contains
categorical labels.
horizontal : bool
If True, plot horizontally.
ax : matplotlib.axis
Axis to plot on.
... |
def handle_legacy_tloc(line: str, position: int, tokens: ParseResults) -> ParseResults:
    """Handle translocations that lack the ``fromLoc`` and ``toLoc`` entries."""
    # Level 5 is below logging.DEBUG (10), so this only shows at very verbose settings.
    log.log(5, 'legacy translocation statement: %s [%d]', line, position)
    # Tokens are passed through unchanged; this handler only records the occurrence.
    return tokens | Handle translocations that lack the ``fromLoc`` and ``toLoc`` entries. |
def check_fam_for_samples(required_samples, source, gold):
"""Check fam files for required_samples."""
# Checking the source panel
source_samples = set()
with open(source, 'r') as input_file:
for line in input_file:
sample = tuple(line.rstrip("\r\n").split(" ")[:2])
if sa... | Check fam files for required_samples. |
def pipe_privateinput(context=None, _INPUT=None, conf=None, **kwargs):
"""An input that prompts the user for some text and yields it forever.
Not loopable.
Parameters
----------
context : pipe2py.Context object
_INPUT : unused
conf : {
'name': {'value': 'parameter name'},
'p... | An input that prompts the user for some text and yields it forever.
Not loopable.
Parameters
----------
context : pipe2py.Context object
_INPUT : unused
conf : {
'name': {'value': 'parameter name'},
'prompt': {'value': 'User prompt'},
'default': {'value': 'default value'... |
def staticEval(self):
    """
    Recursively statically evaluate the result of this operator.
    """
    # Evaluate every operand first so their cached values are up to date.
    for operand in self.operands:
        operand.staticEval()
    self.result._val = self.evalFn()
# | Recursively statistically evaluate result of this operator |
def match_score(self, supported: 'Language') -> int:
"""
Suppose that `self` is the language that the user desires, and
`supported` is a language that is actually supported. This method
returns a number from 0 to 100 indicating how similar the supported
language is (higher number... | Suppose that `self` is the language that the user desires, and
`supported` is a language that is actually supported. This method
returns a number from 0 to 100 indicating how similar the supported
language is (higher numbers are better). This is not a symmetric
relation.
The alg... |
def xyzlabel(labelx, labely, labelz):
    """Apply the x, y and z axis labels in a single call."""
    for setter, text in ((xlabel, labelx), (ylabel, labely), (zlabel, labelz)):
        setter(text)
# | Set all labels at once. |
def lookup(self, topic):
    """Returns producers for a topic."""
    # Validate the name before the request; presumably raises on invalid topics - confirm.
    nsq.assert_valid_topic_name(topic)
    return self._request('GET', '/lookup', fields={'topic': topic}) | Returns producers for a topic. |
def sort(self):
"""Sort by detection time.
.. rubric:: Example
>>> family = Family(
... template=Template(name='a'), detections=[
... Detection(template_name='a', detect_time=UTCDateTime(0) + 200,
... no_chans=8, detect_val=4.2, threshold=1.2,
... | Sort by detection time.
.. rubric:: Example
>>> family = Family(
... template=Template(name='a'), detections=[
... Detection(template_name='a', detect_time=UTCDateTime(0) + 200,
... no_chans=8, detect_val=4.2, threshold=1.2,
... typeo... |
def fill_rect(self, rect):
"""Fill a rectangle on the current rendering target with the drawing color.
Args:
rect (Rect): The destination rectangle, or None to fill the entire rendering target.
Raises:
SDLError: If an error is encountered.
"""
check_int_... | Fill a rectangle on the current rendering target with the drawing color.
Args:
rect (Rect): The destination rectangle, or None to fill the entire rendering target.
Raises:
SDLError: If an error is encountered. |
def persist_booking(booking, user):
"""
Ties an in-progress booking from a session to a user when the user logs in.
If we don't do this, the booking will be lost, because on a login, the
old session will be deleted and a new one will be created. Since the
booking has a FK to the session, it would b... | Ties an in-progress booking from a session to a user when the user logs in.
If we don't do this, the booking will be lost, because on a login, the
old session will be deleted and a new one will be created. Since the
booking has a FK to the session, it would be deleted as well when the user
logs in.
... |
def resolve_args(self, args):
"""
Resolve function call arguments that have object ids
into instances of these objects
"""
def resolve(a):
if isinstance(a, dict):
_id = a.get('i', None)
# If it's a compound type (including dict)
... | Resolve function call arguments that have object ids
into instances of these objects |
def get_uris(self, base_uri, filter_list=None):
"""Return a set of internal URIs."""
return {
re.sub(r'^/', base_uri, link.attrib['href'])
for link in self.parsedpage.get_nodes_by_selector('a')
if 'href' in link.attrib and (
link.attrib['href'].startsw... | Return a set of internal URIs. |
def data(self, data=None):
    """Set the response data when *data* is given; always return the current value."""
    model = self.response_model
    if data is not None:
        model.data = data
    return model.data
# | Set response data |
def generate_field_spec(row):
""" Generate a set of metadata for each field/column in
the data. This is loosely based on jsontableschema. """
names = set()
fields = []
for cell in row:
name = column_alias(cell, names)
field = {
'name': name,
'title': cell.colu... | Generate a set of metadata for each field/column in
the data. This is loosely based on jsontableschema. |
def execute(self, eopatch):
""" Execute computation of HoG features on input eopatch
:param eopatch: Input eopatch
:type eopatch: eolearn.core.EOPatch
:return: EOPatch instance with new keys holding the HoG features and HoG image for visualisation.
:rtype: eolear... | Execute computation of HoG features on input eopatch
:param eopatch: Input eopatch
:type eopatch: eolearn.core.EOPatch
:return: EOPatch instance with new keys holding the HoG features and HoG image for visualisation.
:rtype: eolearn.core.EOPatch |
def findExtname(fimg, extname, extver=None):
"""
Returns the list number of the extension corresponding to EXTNAME given.
"""
i = 0
extnum = None
for chip in fimg:
hdr = chip.header
if 'EXTNAME' in hdr:
if hdr['EXTNAME'].strip() == extname.upper():
if... | Returns the list number of the extension corresponding to EXTNAME given. |
def _tidy2xhtml5(html):
"""Tidy up a html4/5 soup to a parsable valid XHTML5.
Requires tidy-html5 from https://github.com/w3c/tidy-html5
Installation: http://goo.gl/FG27n
"""
html = _io2string(html)
html = _pre_tidy(html) # Pre-process
xhtml5, errors =\
tidy_document(html,
... | Tidy up a html4/5 soup to a parsable valid XHTML5.
Requires tidy-html5 from https://github.com/w3c/tidy-html5
Installation: http://goo.gl/FG27n |
def emboss_pepstats_parser(infile):
"""Get dictionary of pepstats results.
Args:
infile: Path to pepstats outfile
Returns:
dict: Parsed information from pepstats
TODO:
Only currently parsing the bottom of the file for percentages of properties.
"""
with open(infile) a... | Get dictionary of pepstats results.
Args:
infile: Path to pepstats outfile
Returns:
dict: Parsed information from pepstats
TODO:
Only currently parsing the bottom of the file for percentages of properties. |
def _multiply(self, x1, x2, out):
    """Raw pointwise multiplication of two elements."""
    # Delegates to the underlying tensor space; the product is written into out.tensor.
    self.tspace._multiply(x1.tensor, x2.tensor, out.tensor) | Raw pointwise multiplication of two elements. |
def dropout_with_broadcast_dims(x, keep_prob, broadcast_dims=None, **kwargs):
"""Like tf.nn.dropout but takes broadcast_dims instead of noise_shape.
Instead of specifying noise_shape, this function takes broadcast_dims -
a list of dimension numbers in which noise_shape should be 1. The random
keep/drop tensor... | Like tf.nn.dropout but takes broadcast_dims instead of noise_shape.
Instead of specifying noise_shape, this function takes broadcast_dims -
a list of dimension numbers in which noise_shape should be 1. The random
keep/drop tensor has dimensionality 1 along these dimensions.
Args:
x: a floating point tens... |
def _main():
"""Display all information sysconfig detains."""
print('Platform: "%s"' % get_platform())
print('Python version: "%s"' % get_python_version())
print('Current installation scheme: "%s"' % _get_default_scheme())
print()
_print_dict('Paths', get_paths())
print()
_print_dict('Va... | Display all information sysconfig detains. |
def extension_by_source(source, mime_type):
    """Return the file extension used by this plugin, falling back to the MIME subtype."""
    # TODO: should get this information from the plugin
    if source.plugin_name:
        return source.plugin_name
    if mime_type:
        return mime_type.rsplit("/", 1)[-1]
# | Return the file extension used by this plugin |
def configure(self, **configs):
"""Configure the consumer instance
Configuration settings can be passed to constructor,
otherwise defaults will be used:
Keyword Arguments:
bootstrap_servers (list): List of initial broker nodes the consumer
should contact to ... | Configure the consumer instance
Configuration settings can be passed to constructor,
otherwise defaults will be used:
Keyword Arguments:
bootstrap_servers (list): List of initial broker nodes the consumer
should contact to bootstrap initial cluster metadata. This d... |
def update_datetime(value, range = None):
"""
Updates (drifts) a Date value within specified range defined
:param value: a Date value to drift.
:param range: (optional) a range in milliseconds. Default: 10 days
:return: an updated DateTime value.
"""
range = ra... | Updates (drifts) a Date value within specified range defined
:param value: a Date value to drift.
:param range: (optional) a range in milliseconds. Default: 10 days
:return: an updated DateTime value. |
def _from_dict(cls, _dict):
"""Initialize a SpeechRecognitionResults object from a json dictionary."""
args = {}
if 'results' in _dict:
args['results'] = [
SpeechRecognitionResult._from_dict(x)
for x in (_dict.get('results'))
]
if '... | Initialize a SpeechRecognitionResults object from a json dictionary. |
def match_value_to_text(self, text):
"""
this is going to be the tricky bit - probably not possible
to get the 'exact' rating for a value. Will need to do sentiment
analysis of the text to see how it matches the rating. Even that
sounds like it wont work - maybe a ML algorithm wo... | this is going to be the tricky bit - probably not possible
to get the 'exact' rating for a value. Will need to do sentiment
analysis of the text to see how it matches the rating. Even that
sounds like it wont work - maybe a ML algorithm would do it, but
that requires a large body of text... |
def match(record, config=None):
"""Given a record, yield the records in INSPIRE most similar to it.
This method can be used to detect if a record that we are ingesting as a
submission or as an harvest is already present in the system, or to find
out which record a reference should be pointing to.
"... | Given a record, yield the records in INSPIRE most similar to it.
This method can be used to detect if a record that we are ingesting as a
submission or as an harvest is already present in the system, or to find
out which record a reference should be pointing to. |
def bool_assignment(arg, patterns=None):
"""
Summary:
Enforces correct bool argment assignment
Arg:
:arg (*): arg which must be interpreted as either bool True or False
Returns:
bool assignment | TYPE: bool
"""
arg = str(arg) # only eval type str
try:
if p... | Summary:
Enforces correct bool argment assignment
Arg:
:arg (*): arg which must be interpreted as either bool True or False
Returns:
bool assignment | TYPE: bool |
def fix_reference_url(url):
"""Used to parse an incorect url to try to fix it with the most common ocurrences for errors.
If the fixed url is still incorrect, it returns ``None``.
Returns:
String containing the fixed url or the original one if it could not be fixed.
"""
new_url = url
n... | Used to parse an incorect url to try to fix it with the most common ocurrences for errors.
If the fixed url is still incorrect, it returns ``None``.
Returns:
String containing the fixed url or the original one if it could not be fixed. |
def _browse(c):
"""
Open build target's index.html in a browser (using 'open').
"""
index = join(c.sphinx.target, c.sphinx.target_file)
c.run("open {0}".format(index)) | Open build target's index.html in a browser (using 'open'). |
def _prepare_script(self, dest_dir, program):
"""Copy the script into the destination directory.
:param dest_dir: The target directory where the script will be
saved.
:param program: The script text to be saved.
:return: The name of the script file.
:rtype: str
... | Copy the script into the destination directory.
:param dest_dir: The target directory where the script will be
saved.
:param program: The script text to be saved.
:return: The name of the script file.
:rtype: str |
def mclennan_tourky(g, init=None, epsilon=1e-3, max_iter=200,
full_output=False):
r"""
Find one mixed-action epsilon-Nash equilibrium of an N-player normal
form game by the fixed point computation algorithm by McLennan and
Tourky [1]_.
Parameters
----------
g : NormalFor... | r"""
Find one mixed-action epsilon-Nash equilibrium of an N-player normal
form game by the fixed point computation algorithm by McLennan and
Tourky [1]_.
Parameters
----------
g : NormalFormGame
NormalFormGame instance.
init : array_like(int or array_like(float, ndim=1)), optional
... |
def stage_all(self):
    """
    Stages all changed and untracked files
    """
    LOGGER.info('Staging all files')
    # add(A=True) maps to `git add -A`: stages modified, deleted and untracked files.
    self.repo.git.add(A=True) | Stages all changed and untracked files |
def init():
"""
Initialize synchronously.
"""
loop = asyncio.get_event_loop()
if loop.is_running():
raise Exception("You must initialize the Ray async API by calling "
"async_api.init() or async_api.as_future(obj) before "
"the event loop start... | Initialize synchronously. |
def from_api(cls, api):
"""
create an application description for the todo app,
that based on the api can use either tha api or the ux for interaction
"""
ux = TodoUX(api)
from .pseudorpc import PseudoRpc
rpc = PseudoRpc(api)
return cls({ViaAPI: api, Via... | create an application description for the todo app,
that based on the api can use either tha api or the ux for interaction |
def describe_field(k, v, timestamp_parser=default_timestamp_parser):
"""Given a key representing a column name and value representing the value
stored in the column, return a representation of the BigQuery schema
element describing that field. Raise errors if invalid value types are
provided.
Param... | Given a key representing a column name and value representing the value
stored in the column, return a representation of the BigQuery schema
element describing that field. Raise errors if invalid value types are
provided.
Parameters
----------
k : Union[str, unicode]
Key representing th... |
def urlretrieve(url, filename=None, reporthook=None, data=None):
"""
Retrieve a URL into a temporary location on disk.
Requires a URL argument. If a filename is passed, it is used as
the temporary file location. The reporthook argument should be
a callable that accepts a block number, a read size, ... | Retrieve a URL into a temporary location on disk.
Requires a URL argument. If a filename is passed, it is used as
the temporary file location. The reporthook argument should be
a callable that accepts a block number, a read size, and the
total file size of the URL target. The data argument should be
... |
def restore(self):
"""Restore snapshotted state."""
if not self._snapshot:
return
yield from self.set_muted(self._snapshot['muted'])
yield from self.set_volume(self._snapshot['volume'])
yield from self.set_stream(self._snapshot['stream'])
self.callback()
... | Restore snapshotted state. |
def to_XML(self, xml_declaration=True, xmlns=True):
"""
Dumps object fields to an XML-formatted string. The 'xml_declaration'
switch enables printing of a leading standard XML line containing XML
version and encoding. The 'xmlns' switch enables printing of qualified
XMLNS prefix... | Dumps object fields to an XML-formatted string. The 'xml_declaration'
switch enables printing of a leading standard XML line containing XML
version and encoding. The 'xmlns' switch enables printing of qualified
XMLNS prefixes.
:param XML_declaration: if ``True`` (default) prints a lead... |
def include(self, spec, *,
basePath=None,
operationId_mapping=None,
name=None):
""" Adds a new specification to a router
:param spec: path to specification
:param basePath: override base path specify in specification
:param operationId_map... | Adds a new specification to a router
:param spec: path to specification
:param basePath: override base path specify in specification
:param operationId_mapping: mapping for handlers
:param name: name to access original spec |
def _get_u16(self, msb, lsb):
"""
Convert 2 bytes into an unsigned int.
"""
buf = struct.pack('>BB', self._get_u8(msb), self._get_u8(lsb))
return int(struct.unpack('>H', buf)[0]) | Convert 2 bytes into an unsigned int. |
def validate_param_name(name, param_type):
"""Validate that the name follows posix conventions for env variables."""
# http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_235
#
# 3.235 Name
# In the shell command language, a word consisting solely of underscores,
# digits, and alp... | Validate that the name follows posix conventions for env variables. |
def _set_adj_type(self, v, load=False):
"""
Setter method for adj_type, mapped from YANG variable /adj_neighbor_entries_state/adj_neighbor/adj_type (isis-adj-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_adj_type is considered as a private
method. Backends lo... | Setter method for adj_type, mapped from YANG variable /adj_neighbor_entries_state/adj_neighbor/adj_type (isis-adj-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_adj_type is considered as a private
method. Backends looking to populate this variable should
do so via... |
def purge(self):
    """
    Clean old transactions
    """
    # Runs until the stop event is set, purging expired exchanges once per lifetime window.
    while not self.stopped.isSet():
        # wait() returns early if the event gets set, so shutdown is prompt.
        self.stopped.wait(timeout=defines.EXCHANGE_LIFETIME)
        self._messageLayer.purge() | Clean old transactions |
def build_authorization_arg(authdict):
    """
    Create an "Authorization" header value from an authdict (created by generate_response()).

    :param authdict: mapping of Digest auth field names to their values
    :return: string of the form ``Digest k1=v1, k2=v2, ...`` in the dict's insertion order
    """
    # Comprehension over items() replaces the old `+= [...]` loop over keys().
    vallist = ['%s=%s' % (k, v) for k, v in authdict.items()]
    return 'Digest ' + ', '.join(vallist)
# | Create an "Authorization" header value from an authdict (created by generate_response()). |
def error(code, message, **kwargs):
"""Call this to raise an exception and have it stored in the journal"""
assert code in Logger._error_code_to_exception
exc_type, domain = Logger._error_code_to_exception[code]
exc = exc_type(message, **kwargs)
Logger._log(code, exc.message, ERR... | Call this to raise an exception and have it stored in the journal |
def register_view(self, view):
    """Called when the View was registered"""
    super(TopToolBarUndockedWindowController, self).register_view(view)
    # Hook the redock button so clicking it re-docks this undocked window.
    view['redock_button'].connect('clicked', self.on_redock_button_clicked) | Called when the View was registered |
def _render_content(self, content, **settings):
"""
Perform widget rendering, but do not print anything.
"""
result = []
columns = settings[self.SETTING_COLUMNS]
# Format each table cell into string.
(columns, content) = self.table_format(columns, content)
... | Perform widget rendering, but do not print anything. |
def to_cell_table(self, merged=True):
"""Returns a list of lists of Cells with the cooked value and note for each cell."""
new_rows = []
for row_index, row in enumerate(self.rows(CellMode.cooked)):
new_row = []
for col_index, cell_value in enumerate(row):
new_row.append(Cell(cell_value, ... | Returns a list of lists of Cells with the cooked value and note for each cell. |
def convert_dcm2nii(input_dir, output_dir, filename):
""" Call MRICron's `dcm2nii` to convert the DICOM files inside `input_dir`
to Nifti and save the Nifti file in `output_dir` with a `filename` prefix.
Parameters
----------
input_dir: str
Path to the folder that contains the DICOM files
... | Call MRICron's `dcm2nii` to convert the DICOM files inside `input_dir`
to Nifti and save the Nifti file in `output_dir` with a `filename` prefix.
Parameters
----------
input_dir: str
Path to the folder that contains the DICOM files
output_dir: str
Path to the folder where to save t... |
def main(**options):
    """Entry point for the spline loc tool."""
    app = Application(**options)
    # A failed run means the defined threshold is higher than the com/loc ratio.
    if not app.run():
        sys.exit(1)
    return app
# | Spline loc tool. |
def list(context, sort, limit, where, verbose):
"""list(context, sort, limit, where, verbose)
List all products.
>>> dcictl product list
:param string sort: Field to apply sort
:param integer limit: Max number of rows to return
:param string where: An optional filter criteria
:param boole... | list(context, sort, limit, where, verbose)
List all products.
>>> dcictl product list
:param string sort: Field to apply sort
:param integer limit: Max number of rows to return
:param string where: An optional filter criteria
:param boolean verbose: Display verbose output |
def write_block_data(self, addr, cmd, vals):
"""write_block_data(addr, cmd, vals)
Perform SMBus Write Block Data transaction.
"""
self._set_addr(addr)
data = ffi.new("union i2c_smbus_data *")
list_to_smbus_data(data, vals)
if SMBUS.i2c_smbus_access(self._fd,
... | write_block_data(addr, cmd, vals)
Perform SMBus Write Block Data transaction. |
def _direct_set(self, key, value):
'''
_direct_set - INTERNAL USE ONLY!!!!
Directly sets a value on the underlying dict, without running through the setitem logic
'''
dict.__setitem__(self, key, value)
return value | _direct_set - INTERNAL USE ONLY!!!!
Directly sets a value on the underlying dict, without running through the setitem logic |
def shuffle_into_deck(self):
    """
    Shuffle the card into the controller's deck
    """
    # NOTE(review): cheat_action presumably applies the Shuffle outside normal play rules - confirm.
    return self.game.cheat_action(self, [actions.Shuffle(self.controller, self)]) | Shuffle the card into the controller's deck |
def to_json(data):
    """Serialize *data* to an indented, key-sorted JSON string."""
    def _fallback(obj):
        # Objects lacking native JSON support are encoded via their attributes.
        return obj.__dict__
    return json.dumps(data, default=_fallback, sort_keys=True, indent=4)
# | Return data as a JSON string. |
def metadata_updated_on(item):
"""Extracts and coverts the update time from a Bugzilla item.
The timestamp is extracted from 'delta_ts' field. This date is
converted to UNIX timestamp format. Due Bugzilla servers ignore
the timezone on HTTP requests, it will be ignored during the
... | Extracts and coverts the update time from a Bugzilla item.
The timestamp is extracted from 'delta_ts' field. This date is
converted to UNIX timestamp format. Due Bugzilla servers ignore
the timezone on HTTP requests, it will be ignored during the
conversion, too.
:param item: i... |
def private_method(func):
"""Decorator for making an instance method private."""
def func_wrapper(*args, **kwargs):
"""Decorator wrapper function."""
outer_frame = inspect.stack()[1][0]
if 'self' not in outer_frame.f_locals or outer_frame.f_locals['self'] is not args[0]:
rai... | Decorator for making an instance method private. |
def _add_versions(samples):
    """Add tool and data versions to the summary.
    """
    # Versions are attached to the first sample only; the list itself is returned unchanged.
    samples[0]["versions"] = {"tools": programs.write_versions(samples[0]["dirs"], samples[0]["config"]),
                              "data": provenancedata.write_versions(samples[0]["dirs"], samples)}
    return samples | Add tool and data versions to the summary. |
def _sibpath(path, sibling):
"""
Return the path to a sibling of a file in the filesystem.
This is useful in conjunction with the special C{__file__} attribute
that Python provides for modules, so modules can load associated
resource files.
(Stolen from twisted.python.util)
"""
return ... | Return the path to a sibling of a file in the filesystem.
This is useful in conjunction with the special C{__file__} attribute
that Python provides for modules, so modules can load associated
resource files.
(Stolen from twisted.python.util) |
def convert_concat(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert concatenation.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary w... | Convert concatenation.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for ker... |
def fit(self, X, y=None, sample_weight=None):
"""Compute k-means clustering.
Parameters
----------
X : array-like or sparse matrix, shape=(n_samples, n_features)
y : Ignored
not used, present here for API consistency by convention.
sample_weight : array-li... | Compute k-means clustering.
Parameters
----------
X : array-like or sparse matrix, shape=(n_samples, n_features)
y : Ignored
not used, present here for API consistency by convention.
sample_weight : array-like, shape (n_samples,), optional
The weights ... |
def get_lines(self):
    """Read the file at ``self.path``, cache its lines on the instance and return them.
    :return: Lines in file
    """
    with open(self.path, "r") as source:
        cached = source.readlines()
    self.lines = cached  # keep a copy on the instance for later reuse
    return self.lines
# | Gets lines in file
:return: Lines in file |
def reset(self, total_size=None):
"""Remove all file system contents and reset the root."""
self.root = FakeDirectory(self.path_separator, filesystem=self)
self.cwd = self.root.name
self.open_files = []
self._free_fd_heap = []
self._last_ino = 0
self._last_dev = ... | Remove all file system contents and reset the root. |
def build_path(G, node, endpoints, path):
"""
Recursively build a path of nodes until you hit an endpoint node.
Parameters
----------
G : networkx multidigraph
node : int
the current node to start from
endpoints : set
the set of all nodes in the graph that are endpoints
... | Recursively build a path of nodes until you hit an endpoint node.
Parameters
----------
G : networkx multidigraph
node : int
the current node to start from
endpoints : set
the set of all nodes in the graph that are endpoints
path : list
the list of nodes in order in the ... |
def operates_on(self, qubits: Iterable[raw_types.Qid]) -> bool:
"""Determines if the moment has operations touching the given qubits.
Args:
qubits: The qubits that may or may not be touched by operations.
Returns:
Whether this moment has operations involving the qubits.... | Determines if the moment has operations touching the given qubits.
Args:
qubits: The qubits that may or may not be touched by operations.
Returns:
Whether this moment has operations involving the qubits. |
def nested_genobject(self, metadata, attr, datastore):
"""
Allow for the printing of nested GenObjects
:param metadata: Nested dictionary containing the metadata. Will be further populated by this method
:param attr: Current attribute being evaluated. Must be a GenObject e.g. sample.gene... | Allow for the printing of nested GenObjects
:param metadata: Nested dictionary containing the metadata. Will be further populated by this method
:param attr: Current attribute being evaluated. Must be a GenObject e.g. sample.general
:param datastore: The dictionary of the current attribute. Will... |
def truncate(s, max_len=20, ellipsis='...'):
r"""Return string at most `max_len` characters or sequence elments appended with the `ellipsis` characters
>>> truncate(OrderedDict(zip(list('ABCDEFGH'), range(8))), 1)
"{'A': 0..."
>>> truncate(list(range(5)), 3)
'[0, 1, 2...'
>>> truncate(np.arange... | r"""Return string at most `max_len` characters or sequence elments appended with the `ellipsis` characters
>>> truncate(OrderedDict(zip(list('ABCDEFGH'), range(8))), 1)
"{'A': 0..."
>>> truncate(list(range(5)), 3)
'[0, 1, 2...'
>>> truncate(np.arange(5), 3)
'[0, 1, 2...'
>>> truncate('Too v... |
def dependent_hosted_number_orders(self):
"""
Access the dependent_hosted_number_orders
:returns: twilio.rest.preview.hosted_numbers.authorization_document.dependent_hosted_number_order.DependentHostedNumberOrderList
:rtype: twilio.rest.preview.hosted_numbers.authorization_document.depe... | Access the dependent_hosted_number_orders
:returns: twilio.rest.preview.hosted_numbers.authorization_document.dependent_hosted_number_order.DependentHostedNumberOrderList
:rtype: twilio.rest.preview.hosted_numbers.authorization_document.dependent_hosted_number_order.DependentHostedNumberOrderList |
def check_if_ok_to_update(self):
    """Return True when enough time has elapsed to allow another HTTP request.

    Compares the current epoch time against ``self.last_refresh`` plus
    ``self.refresh_rate``.  A ``None`` last refresh counts as "never
    refreshed", so the first call is always allowed.

    :return: bool -- True if an update may be performed now
    """
    current_time = int(time.time())
    last_refresh = self.last_refresh
    if last_refresh is None:
        # Never refreshed before: treat as epoch 0 so the check passes.
        last_refresh = 0
    # Collapsed the if/return True/return False tail into a direct
    # boolean expression; semantics are unchanged.
    return current_time >= (last_refresh + self.refresh_rate)
def multiplication_circuit(nbit, vartype=dimod.BINARY):
"""Multiplication circuit constraint satisfaction problem.
A constraint satisfaction problem that represents the binary multiplication :math:`ab=p`,
where the multiplicands are binary variables of length `nbit`; for example,
:math:`a_0 + 2a_1 + 4a... | Multiplication circuit constraint satisfaction problem.
A constraint satisfaction problem that represents the binary multiplication :math:`ab=p`,
where the multiplicands are binary variables of length `nbit`; for example,
:math:`a_0 + 2a_1 + 4a_2 +... +2^ma_{nbit}`.
The square below shows a graphic re... |
def _construct_deutsch_jozsa_circuit(self):
"""
Builds the Deutsch-Jozsa circuit. Which can determine whether a function f mapping
:math:`\{0,1\}^n \to \{0,1\}` is constant or balanced, provided that it is one of them.
:return: A program corresponding to the desired instance of Deutsch ... | Builds the Deutsch-Jozsa circuit. Which can determine whether a function f mapping
:math:`\{0,1\}^n \to \{0,1\}` is constant or balanced, provided that it is one of them.
:return: A program corresponding to the desired instance of Deutsch Jozsa's Algorithm.
:rtype: Program |
def get_ambient_sensor_data(self):
"""Refresh ambient sensor history"""
resource = 'cameras/{}/ambientSensors/history'.format(self.device_id)
history_event = self.publish_and_get_event(resource)
if history_event is None:
return None
properties = history_event.get('p... | Refresh ambient sensor history |
def clean(self):
"""
Final validations of model fields.
1. Validate that selected site for enterprise customer matches with the selected identity provider's site.
"""
super(EnterpriseCustomerIdentityProviderAdminForm, self).clean()
provider_id = self.cleaned_data.get('p... | Final validations of model fields.
1. Validate that selected site for enterprise customer matches with the selected identity provider's site. |
def process_directory_statements_sorted_by_pmid(directory_name):
"""Processes a directory filled with CSXML files, first normalizing the
character encoding to utf-8, and then processing into INDRA statements
sorted by pmid.
Parameters
----------
directory_name : str
The name of a direct... | Processes a directory filled with CSXML files, first normalizing the
character encoding to utf-8, and then processing into INDRA statements
sorted by pmid.
Parameters
----------
directory_name : str
The name of a directory filled with csxml files to process
Returns
-------
pmid... |
def get_lowest_numeric_score_metadata(self):
"""Gets the metadata for the lowest numeric score.
return: (osid.Metadata) - metadata for the lowest numeric score
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.Resource... | Gets the metadata for the lowest numeric score.
return: (osid.Metadata) - metadata for the lowest numeric score
*compliance: mandatory -- This method must be implemented.* |
def unpickle(self, parent):
"""Sets the parent pointer references for the module *and* all of its
child classes that also have pointer references."""
self.parent = parent
self._unpickle_collection(self.members)
self._unpickle_collection(self.dependencies)
self._unpickle_c... | Sets the parent pointer references for the module *and* all of its
child classes that also have pointer references. |
def parse_log_entry(text):
"""This function does all real job on log line parsing.
it setup two cases for restart parsing if a line
with wrong format was found.
Restarts:
- use_value: just retuns an object it was passed. This can
be any value.
- reparse: calls `parse_log_entry` again with... | This function does all real job on log line parsing.
it setup two cases for restart parsing if a line
with wrong format was found.
Restarts:
- use_value: just retuns an object it was passed. This can
be any value.
- reparse: calls `parse_log_entry` again with other text value.
Beware, t... |
def _to_reddit_list(arg):
"""Return an argument converted to a reddit-formatted list.
The returned format is a comma deliminated list. Each element is a string
representation of an object. Either given as a string or as an object that
is then converted to its string representation.
"""
if (isin... | Return an argument converted to a reddit-formatted list.
The returned format is a comma deliminated list. Each element is a string
representation of an object. Either given as a string or as an object that
is then converted to its string representation. |
def flg(self, name, help, abbrev=None):
    """Register a boolean command-line flag.

    :param name: flag name; underscores become dashes in the long form
    :param help: help text shown in usage output
    :param abbrev: short option; defaults to '-' plus the flag's first letter
    """
    short = abbrev if abbrev else '-' + name[0]
    self._add(name, short, '--' + name.replace('_', '-'),
              action='store_true', help=help)
def _client_properties():
"""AMQPStorm Client Properties.
:rtype: dict
"""
return {
'product': 'AMQPStorm',
'platform': 'Python %s (%s)' % (platform.python_version(),
platform.python_implementation()),
'capa... | AMQPStorm Client Properties.
:rtype: dict |
def update(self):
"""Update RAID stats using the input method."""
# Init new stats
stats = self.get_init_value()
if import_error_tag:
return self.stats
if self.input_method == 'local':
# Update stats using the PyMDstat lib (https://github.com/nicolargo/p... | Update RAID stats using the input method. |
def get_songs() -> Iterator:
    """Yield song rows queried from the database.

    NOTE(review): the original docstring claims only rows with the
    fingerprinted flag set TRUE are returned, but the query applies no
    filter -- confirm against the ``songs`` schema before relying on it.
    """
    with session_withcommit() as session:
        # .all() materializes the result set; delegate iteration to it.
        yield from session.query(songs).all()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.