code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def cli(env, identifier, uri, ibm_api_key):
"""Export an image to object storage.
The URI for an object storage object (.vhd/.iso file) of the format:
swift://<objectStorageAccount>@<cluster>/<container>/<objectPath>
or cos://<regionName>/<bucketName>/<objectPath> if using IBM Cloud
Object Storage
... | Export an image to object storage.
The URI for an object storage object (.vhd/.iso file) of the format:
swift://<objectStorageAccount>@<cluster>/<container>/<objectPath>
or cos://<regionName>/<bucketName>/<objectPath> if using IBM Cloud
Object Storage |
def process(self, job_id):
"""
Process a job by the queue
"""
self._logger.info(
'{:.2f}: Process job {}'.format(self._env.now, job_id)
)
# log time of commencement of service
self._observer.notify_service(time=self._env.now, job_id=job_id)
... | Process a job by the queue |
def localize_fieldnames(fields, internationalized_fields):
"""
Given a list of fields and a list of field names that
are internationalized, will return a list with
all internationalized fields properly localized.
>>> from django.utils.translation import activate
>>> activate('en-us')
>>... | Given a list of fields and a list of field names that
are internationalized, will return a list with
all internationalized fields properly localized.
>>> from django.utils.translation import activate
>>> activate('en-us')
>>> localize_fieldnames(['name', 'title', 'url'], ['title'])
['name',... |
def derivative(self, x):
"""Return the derivative at ``x``.
The derivative of the right scalar operator multiplication
follows the chain rule:
``OperatorRightScalarMult(op, s).derivative(y) ==
OperatorLeftScalarMult(op.derivative(s * y), s)``
Parameters
... | Return the derivative at ``x``.
The derivative of the right scalar operator multiplication
follows the chain rule:
``OperatorRightScalarMult(op, s).derivative(y) ==
OperatorLeftScalarMult(op.derivative(s * y), s)``
Parameters
----------
x : `domain` `el... |
def as_xml(self):
"""Return XML serialization of this list.
This code does not support the case where the list is too big for
a single XML document.
"""
self.default_capability()
s = self.new_sitemap()
return s.resources_as_xml(self, sitemapindex=self.sitemapinde... | Return XML serialization of this list.
This code does not support the case where the list is too big for
a single XML document. |
def read_namespaced_pod_status(self, name, namespace, **kwargs): # noqa: E501
"""read_namespaced_pod_status # noqa: E501
read status of the specified Pod # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=... | read_namespaced_pod_status # noqa: E501
read status of the specified Pod # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_pod_status(name, namespace, async_req=True)
... |
def clear_display_name(self):
"""Clears the display name.
raise: NoAccess - ``display_name`` cannot be modified
*compliance: mandatory -- This method must be implemented.*
"""
if (self.get_display_name_metadata().is_read_only() or
self.get_display_name_metadata... | Clears the display name.
raise: NoAccess - ``display_name`` cannot be modified
*compliance: mandatory -- This method must be implemented.* |
def save_data(self):
"""Save data"""
title = _( "Save profiler result")
filename, _selfilter = getsavefilename(
self, title, getcwd_or_home(),
_("Profiler result")+" (*.Result)")
if filename:
self.datatree.save_data(filename) | Save data |
def find_device(self, service_uuids=[], name=None, timeout_sec=TIMEOUT_SEC):
"""Return the first device that advertises the specified service UUIDs or
has the specified name. Will wait up to timeout_sec seconds for the device
to be found, and if the timeout is zero then it will not wait at all a... | Return the first device that advertises the specified service UUIDs or
has the specified name. Will wait up to timeout_sec seconds for the device
to be found, and if the timeout is zero then it will not wait at all and
immediately return a result. When no device is found a value of None is
... |
def MI_referenceNames(self,
env,
objectName,
resultClassName,
role):
# pylint: disable=invalid-name
"""Return instance names of an association class.
Implements the WBEM operation ReferenceNa... | Return instance names of an association class.
Implements the WBEM operation ReferenceNames in terms
of the references method. A derived class will not normally
override this method. |
def _build_latex_array(self, aliases=None):
"""Returns an array of strings containing \\LaTeX for this circuit.
If aliases is not None, aliases contains a dict mapping
the current qubits in the circuit to new qubit names.
We will deduce the register names and sizes from aliases.
... | Returns an array of strings containing \\LaTeX for this circuit.
If aliases is not None, aliases contains a dict mapping
the current qubits in the circuit to new qubit names.
We will deduce the register names and sizes from aliases. |
def get_value(self, index):
"""Return current value"""
if index.column() == 0:
return self.keys[ index.row() ]
elif index.column() == 1:
return self.types[ index.row() ]
elif index.column() == 2:
return self.sizes[ index.row() ]
else:
... | Return current value |
def dot(x_gpu, y_gpu, transa='N', transb='N', handle=None, target=None):
"""
Dot product of two arrays.
For 1D arrays, this function computes the inner product. For 2D
arrays of shapes `(m, k)` and `(k, n)`, it computes the matrix
product; the result has shape `(m, n)`.
Parameters
--------... | Dot product of two arrays.
For 1D arrays, this function computes the inner product. For 2D
arrays of shapes `(m, k)` and `(k, n)`, it computes the matrix
product; the result has shape `(m, n)`.
Parameters
----------
x_gpu : pycuda.gpuarray.GPUArray
Input array.
y_gpu : pycuda.gpuar... |
def get_type(full_path):
"""Get the type (socket, file, dir, symlink, ...) for the provided path"""
status = {'type': []}
if os.path.ismount(full_path):
status['type'] += ['mount-point']
elif os.path.islink(full_path):
status['type'] += ['symlink']
if os.path.isfile(full_path):
... | Get the type (socket, file, dir, symlink, ...) for the provided path |
def start_server(app: web.Application = None, port: int = None,
address: str = None, **kwargs: Any) -> HTTPServer:
"""Start server with ``app`` on ``localhost:port``.
If port is not specified, use command line option of ``--port``.
"""
app = app or get_app()
port = port if port is ... | Start server with ``app`` on ``localhost:port``.
If port is not specified, use command line option of ``--port``. |
def plotGrid(self, numLines=(5,5), lineWidth=1, colour="#777777"):
"""Plot NUMLINES[0] vertical gridlines and NUMLINES[1] horizontal gridlines,
while keeping the initial axes bounds that were present upon its calling.
Will not work for certain cases.
"""
x1, x2, y1, y2 = mp.axis(... | Plot NUMLINES[0] vertical gridlines and NUMLINES[1] horizontal gridlines,
while keeping the initial axes bounds that were present upon its calling.
Will not work for certain cases. |
def prepend_string_list(self, key, value, max_length_key):
"""Prepend a fixed-length string list with a new string.
The oldest string will be removed from the list. If the string is
already in the list, it is shuffled to the top. Use this to implement
things like a 'most recent files' e... | Prepend a fixed-length string list with a new string.
The oldest string will be removed from the list. If the string is
already in the list, it is shuffled to the top. Use this to implement
things like a 'most recent files' entry. |
def token(cls: Type[ConditionType], left: Any, op: Optional[Any] = None,
right: Optional[Any] = None) -> ConditionType:
"""
Return Condition instance from arguments and Operator
:param left: Left argument
:param op: Operator
:param right: Right argument
:re... | Return Condition instance from arguments and Operator
:param left: Left argument
:param op: Operator
:param right: Right argument
:return: |
def missing_any(da, freq, **kwds):
r"""Return a boolean DataArray indicating whether there are missing days in the resampled array.
Parameters
----------
da : DataArray
Input array at daily frequency.
freq : str
Resampling frequency.
Returns
-------
out : DataArray
A ... | r"""Return a boolean DataArray indicating whether there are missing days in the resampled array.
Parameters
----------
da : DataArray
Input array at daily frequency.
freq : str
Resampling frequency.
Returns
-------
out : DataArray
A boolean array set to True if any month ... |
def get_snapshot_policies(self, view=None):
"""
Retrieve a list of snapshot policies.
@param view: View to materialize. Valid values are 'full', 'summary', 'export', 'export_redacted'.
@return: A list of snapshot policies.
@since: API v6
"""
return self._get("snapshots/policies", ApiSnapsho... | Retrieve a list of snapshot policies.
@param view: View to materialize. Valid values are 'full', 'summary', 'export', 'export_redacted'.
@return: A list of snapshot policies.
@since: API v6 |
def get_placeholder_image(width, height, name=None, fg_color=get_color('black'),
bg_color=get_color('grey'), text=None, font=u'Verdana.ttf',
fontsize=42, encoding=u'unic', mode='RGBA', fmt=u'PNG'):
"""Little spin-off from https://github.com/Visgean/python-placeholder
that not saves an image and ... | Little spin-off from https://github.com/Visgean/python-placeholder
that not saves an image and instead returns it. |
def get_pressure(self):
"""
Returns the pressure in Millibars
"""
self._init_pressure() # Ensure pressure sensor is initialised
pressure = 0
data = self._pressure.pressureRead()
if (data[0]): # Pressure valid
pressure = data[1]
return pressu... | Returns the pressure in Millibars |
def _construct_role(self, managed_policy_map):
"""Constructs a Lambda execution role based on this SAM function's Policies property.
:returns: the generated IAM Role
:rtype: model.iam.IAMRole
"""
execution_role = IAMRole(self.logical_id + 'Role', attributes=self.get_passthrough_... | Constructs a Lambda execution role based on this SAM function's Policies property.
:returns: the generated IAM Role
:rtype: model.iam.IAMRole |
def CopyToProto(self, proto):
"""Copies this to the matching proto in descriptor_pb2.
Args:
proto: An empty proto instance from descriptor_pb2.
Raises:
Error: If self couldnt be serialized, due to to few constructor arguments.
"""
if (self.file is not None and
self._serialized_... | Copies this to the matching proto in descriptor_pb2.
Args:
proto: An empty proto instance from descriptor_pb2.
Raises:
Error: If self couldnt be serialized, due to to few constructor arguments. |
def get_region_nt_counts(region, bam, stranded=False):
"""
Get counts of each nucleotide from a bam file for a given region. If R1 and
R2 reads both overlap a position, only one count will be added. If the R1
and R2 reads disagree at a position they both overlap, that read pair is not
used for that ... | Get counts of each nucleotide from a bam file for a given region. If R1 and
R2 reads both overlap a position, only one count will be added. If the R1
and R2 reads disagree at a position they both overlap, that read pair is not
used for that position. Can optionally output strand-specific counts.
Param... |
def is_rootlevel(self):
"""
Determine if the Activity is at the root level of a project.
It will look for the name of the parent which should be either ActivityRootNames.WORKFLOW_ROOT or
ActivityRootNames.CATALOG_ROOT. If the name of the parent cannot be found an additional API call is ... | Determine if the Activity is at the root level of a project.
It will look for the name of the parent which should be either ActivityRootNames.WORKFLOW_ROOT or
ActivityRootNames.CATALOG_ROOT. If the name of the parent cannot be found an additional API call is made
to retrieve the parent object (... |
def update(self, friendly_name=values.unset,
default_service_role_sid=values.unset,
default_channel_role_sid=values.unset,
default_channel_creator_role_sid=values.unset,
read_status_enabled=values.unset, reachability_enabled=values.unset,
typing... | Update the ServiceInstance
:param unicode friendly_name: A string to describe the resource
:param unicode default_service_role_sid: The service role assigned to users when they are added to the service
:param unicode default_channel_role_sid: The channel role assigned to users when they are add... |
def _load_model(self):
"""Loads robot and optionally add grippers."""
super()._load_model()
self.mujoco_robot = Baxter()
if self.has_gripper_right:
self.gripper_right = gripper_factory(self.gripper_right_name)
if not self.gripper_visualization:
sel... | Loads robot and optionally add grippers. |
def get_default_cassandra_connection():
"""
Return first default cassandra connection
:return:
"""
for alias, conn in get_cassandra_connections():
if conn.connection.default:
return alias, conn
return list(get_cassandra_connections())[0] | Return first default cassandra connection
:return: |
def _set_people(self, people):
""" Sets who the object is sent to """
if hasattr(people, "object_type"):
people = [people]
elif hasattr(people, "__iter__"):
people = list(people)
return people | Sets who the object is sent to |
def _simplify_arguments(arguments):
"""
If positional or keyword arguments are empty return only one or the other.
"""
if len(arguments.args) == 0:
return arguments.kwargs
elif len(arguments.kwargs) == 0:
return arguments.args
else:
return arguments | If positional or keyword arguments are empty return only one or the other. |
def add(self, new_results):
""" Add new benchmark results. """
for result in new_results:
result.update(self.context)
self.results = self.results.append(result, ignore_index=True) | Add new benchmark results. |
def format_number_field(__, prec, number, locale):
"""Formats a number field."""
prec = NUMBER_DECIMAL_DIGITS if prec is None else int(prec)
locale = Locale.parse(locale)
pattern = locale.decimal_formats.get(None)
return pattern.apply(number, locale, force_frac=(prec, prec)) | Formats a number field. |
def __ComputeEndByte(self, start, end=None, use_chunks=True):
"""Compute the last byte to fetch for this request.
This is all based on the HTTP spec for Range and
Content-Range.
Note that this is potentially confusing in several ways:
* the value for the last byte is 0-based,... | Compute the last byte to fetch for this request.
This is all based on the HTTP spec for Range and
Content-Range.
Note that this is potentially confusing in several ways:
* the value for the last byte is 0-based, eg "fetch 10 bytes
from the beginning" would return 9 here.
... |
def run_ut_python3_qemu_internal():
"""this runs inside the vm"""
pkg = glob.glob('mxnet_dist/*.whl')[0]
logging.info("=== NOW Running inside QEMU ===")
logging.info("PIP Installing %s", pkg)
check_call(['sudo', 'pip3', 'install', pkg])
logging.info("PIP Installing mxnet/test_requirements.txt") ... | this runs inside the vm |
def do_status(self, arg):
''' Print information about the arm. '''
info = self.arm.get_info()
max_len = len(max(info.keys(), key=len))
print(self.style.theme('\nArm Status'))
for key, value in info.items():
print(self.style.help(key.ljust(max_len + 2), str(value)))
... | Print information about the arm. |
def parse(self, buf: memoryview, params: Params) \
-> Tuple[Command, memoryview]:
"""Parse the given bytes into a command. The basic syntax is a tag
string, a command name, possibly some arguments, and then an endline.
If the command has a complete structure but cannot be parsed, an
... | Parse the given bytes into a command. The basic syntax is a tag
string, a command name, possibly some arguments, and then an endline.
If the command has a complete structure but cannot be parsed, an
:class:`InvalidCommand` is returned.
Args:
buf: The bytes to parse.
... |
def object_to_json(obj):
"""Convert object that cannot be natively serialized by python to JSON representation."""
if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
return obj.isoformat()
return str(obj) | Convert object that cannot be natively serialized by python to JSON representation. |
def multi_p_run(tot_num, _func, worker, params, n_process):
"""
Run _func with multi-process using params.
"""
from multiprocessing import Process, Queue
out_q = Queue()
procs = []
split_num = split_seq(list(range(0, tot_num)), n_process)
print(tot_num, ">>", split_num)
split_len ... | Run _func with multi-process using params. |
def _get(self, uri):
"""
Handles the communication with the API when getting
a specific resource managed by this class.
"""
resp, resp_body = self.api.method_get(uri)
return self.resource_class(self, resp_body, self.response_key,
loaded=True) | Handles the communication with the API when getting
a specific resource managed by this class. |
def response(self):
"""
Dictionary of public and private, hostnames and ips.
:rtype: dict
"""
describe_request_params = {}
if self.filter is not None:
if type(self.filter) is not dict:
try:
filters = json.loads(self.filter)
... | Dictionary of public and private, hostnames and ips.
:rtype: dict |
def _LinearMapByteStream(
self, byte_stream, byte_offset=0, context=None, **unused_kwargs):
"""Maps a data type sequence on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
context (Optional[DataTypeMapContex... | Maps a data type sequence on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
tuple[object, ...]: mapped values.
Raises:
Map... |
def codes2unicode(codes, composed=True):
''' Convert Hanyang-PUA code iterable to Syllable-Initial-Peak-Final
encoded unicode string.
:param codes:
an iterable of Hanyang-PUA code
:param composed:
the result should be composed as much as possible (default True)
:return: Syllable-Ini... | Convert Hanyang-PUA code iterable to Syllable-Initial-Peak-Final
encoded unicode string.
:param codes:
an iterable of Hanyang-PUA code
:param composed:
the result should be composed as much as possible (default True)
:return: Syllable-Initial-Peak-Final encoded unicode string |
def write(self, s):
"""
Write wrapper.
Parameters
----------
s : bytes
Bytes to write
"""
try:
self._write_lock.acquire()
self.handle.sendall(s)
except socket.timeout:
self._connect()
except socket.err... | Write wrapper.
Parameters
----------
s : bytes
Bytes to write |
def _write_current_buffer_for_group_key(self, key):
"""
Find the buffer for a given group key, prepare it to be written
and writes it calling write() method.
"""
write_info = self.write_buffer.pack_buffer(key)
self.write(write_info.get('file_path'),
sel... | Find the buffer for a given group key, prepare it to be written
and writes it calling write() method. |
def mode_string_v10(msg):
'''mode string for 1.0 protocol, from heartbeat'''
if msg.autopilot == mavlink.MAV_AUTOPILOT_PX4:
return interpret_px4_mode(msg.base_mode, msg.custom_mode)
if not msg.base_mode & mavlink.MAV_MODE_FLAG_CUSTOM_MODE_ENABLED:
return "Mode(0x%08x)" % msg.base_mode
if... | mode string for 1.0 protocol, from heartbeat |
def find_features(seqs, locus_tag="all", utr_len=200):
"""Find features in sequences by locus tag"""
found_features = []
for seq_i in seqs:
for feature in seq_i.features:
if feature.type == "CDS" and (locus_tag == "all" or \
('locus_tag' in feature.qualifiers and \
... | Find features in sequences by locus tag |
def ase(dbuser, dbpassword, args, gui):
"""Connection to atomic structures on the Catalysis-Hub
server with ase db cli.
Arguments to the the ase db cli client must be enclosed in one string.
For example: <cathub ase 'formula=Ag6In6H -s energy -L 200'>.
To see possible ase db arguments ru... | Connection to atomic structures on the Catalysis-Hub
server with ase db cli.
Arguments to the the ase db cli client must be enclosed in one string.
For example: <cathub ase 'formula=Ag6In6H -s energy -L 200'>.
To see possible ase db arguments run <ase db --help> |
def update_domain_queues(self):
'''
Check to update existing queues already in memory
new queues are created elsewhere
'''
for key in self.domain_config:
final_key = "{name}:{domain}:queue".format(
name=self.spider.name,
domain=... | Check to update existing queues already in memory
new queues are created elsewhere |
def search_mergedcell_value(xl_sheet, merged_range):
"""
Search for a value in merged_range cells.
"""
for search_row_idx in range(merged_range[0], merged_range[1]):
for search_col_idx in range(merged_range[2], merged_range[3]):
if xl_sheet.cell(search_row_idx, search_col_idx).value:... | Search for a value in merged_range cells. |
def _clear(self):
'''
Actual clear
'''
ret = ([],[])
for q in self.queues.values():
pr = q._clear()
ret[0].extend(pr[0])
ret[1].extend(pr[1])
self.totalSize = 0
del self.prioritySet[:]
if self.isWaited and self.canAppend... | Actual clear |
def dataframe(self):
"""
Returns a pandas DataFrame containing all other class properties and
values. The index for the DataFrame is the string URI that is used to
instantiate the class, such as '201806070VEG'.
"""
if self._away_goals is None and self._home_goals is None:... | Returns a pandas DataFrame containing all other class properties and
values. The index for the DataFrame is the string URI that is used to
instantiate the class, such as '201806070VEG'. |
def GetByteSize(self):
"""Retrieves the byte size of the data type definition.
Returns:
int: data type size in bytes or None if size cannot be determined.
"""
if not self.element_data_type_definition:
return None
if self.elements_data_size:
return self.elements_data_size
if ... | Retrieves the byte size of the data type definition.
Returns:
int: data type size in bytes or None if size cannot be determined. |
def check_positive_flux(cls, kwargs_ps):
"""
check whether inferred linear parameters are positive
:param kwargs_ps:
:return: bool
"""
pos_bool = True
for kwargs in kwargs_ps:
point_amp = kwargs['point_amp']
for amp in point_amp:
... | check whether inferred linear parameters are positive
:param kwargs_ps:
:return: bool |
def load_genotypes(self):
"""Actually loads the first chunk of genotype data into memory due to \
the individual oriented format of MACH data.
Due to the fragmented approach to data loading necessary to avoid
running out of RAM, this function will initialize the data structures
... | Actually loads the first chunk of genotype data into memory due to \
the individual oriented format of MACH data.
Due to the fragmented approach to data loading necessary to avoid
running out of RAM, this function will initialize the data structures
with the first chunk of loci and prep... |
def _on_io_events(self, fd=None, _events=None):
"""Invoked by Tornado's IOLoop when there are events for the fd
:param int fd: The file descriptor for the event
:param int _events: The events raised
"""
if fd not in self._connections:
LOGGER.warning('Received IO eve... | Invoked by Tornado's IOLoop when there are events for the fd
:param int fd: The file descriptor for the event
:param int _events: The events raised |
def __get_overall_data(self, x):
"""
(recursive) Collect all "sensorGenus" and "sensorSpecies" fields, set data to self
:param any x: Any data type
:return none:
"""
if isinstance(x, dict):
if "sensorGenus" in x:
if x["sensorGenus"] and x["sens... | (recursive) Collect all "sensorGenus" and "sensorSpecies" fields, set data to self
:param any x: Any data type
:return none: |
def set_sample_probability(probability):
"""Set the probability that a batch will be submitted to the InfluxDB
server. This should be a value that is greater than or equal to ``0`` and
less than or equal to ``1.0``. A value of ``0.25`` would represent a
probability of 25% that a batch would be written t... | Set the probability that a batch will be submitted to the InfluxDB
server. This should be a value that is greater than or equal to ``0`` and
less than or equal to ``1.0``. A value of ``0.25`` would represent a
probability of 25% that a batch would be written to InfluxDB.
:param float probability: The v... |
def conv_gru(x,
kernel_size,
filters,
padding="SAME",
dilation_rate=(1, 1),
name=None,
reuse=None):
"""Convolutional GRU in 1 dimension."""
# Let's make a shorthand for conv call first.
def do_conv(args, name, bias_start, padding):
... | Convolutional GRU in 1 dimension. |
def run_transaction(transactor, callback):
"""Run a transaction with retries.
``callback()`` will be called with one argument to execute the
transaction. ``callback`` may be called more than once; it should have
no side effects other than writes to the database on the given
connection. ``callback``... | Run a transaction with retries.
``callback()`` will be called with one argument to execute the
transaction. ``callback`` may be called more than once; it should have
no side effects other than writes to the database on the given
connection. ``callback`` should not call ``commit()` or ``rollback()``;
... |
def escape_identifier(text, reg=KWD_RE):
"""Escape partial C identifiers so they can be used as
attributes/arguments"""
# see http://docs.python.org/reference/lexical_analysis.html#identifiers
if not text:
return "_"
if text[0].isdigit():
text = "_" + text
return reg.sub(r"\1_",... | Escape partial C identifiers so they can be used as
attributes/arguments |
def get(self):
"""
Get a JSON-ready representation of this ClickTracking.
:returns: This ClickTracking, ready for use in a request body.
:rtype: dict
"""
click_tracking = {}
if self.enable is not None:
click_tracking["enable"] = self.enable
i... | Get a JSON-ready representation of this ClickTracking.
:returns: This ClickTracking, ready for use in a request body.
:rtype: dict |
def iter_successors(self, graph, orig, branch, turn, tick, *, forward=None):
"""Iterate over successors of a given origin node at a given time."""
if self.db._no_kc:
yield from self._adds_dels_sucpred(self.successors[graph, orig], branch, turn, tick)[0]
return
if forward ... | Iterate over successors of a given origin node at a given time. |
def to_vector(np_array):
"""Convert numpy array to MLlib Vector
"""
if len(np_array.shape) == 1:
return Vectors.dense(np_array)
else:
raise Exception("An MLLib Vector can only be created from a one-dimensional " +
"numpy array, got {}".format(len(np_array.shape))) | Convert numpy array to MLlib Vector |
def nextindx(self):
'''
Determine the next insert offset according to storage.
Returns:
int: The next insert offset.
'''
indx = 0
with s_lmdbslab.Scan(self.slab, self.db) as curs:
last_key = curs.last_key()
if last_key is not None:
... | Determine the next insert offset according to storage.
Returns:
int: The next insert offset. |
def load_config(self, config=None):
''' loads a config file
Parameters:
config (str):
Optional name of manual config file to load
'''
# Read the config file
cfgname = (config or self.config_name)
cfgname = 'sdsswork' if cfgname is None else c... | loads a config file
Parameters:
config (str):
Optional name of manual config file to load |
def cleanUpdatesList(self, col, cellIdx, seg):
"""
Removes any update that would be for the given col, cellIdx, segIdx.
NOTE: logically, we need to do this when we delete segments, so that if
an update refers to a segment that was just deleted, we also remove
that update from the update list. Howeve... | Removes any update that would be for the given col, cellIdx, segIdx.
NOTE: logically, we need to do this when we delete segments, so that if
an update refers to a segment that was just deleted, we also remove
that update from the update list. However, I haven't seen it trigger
in any of the unit tests y... |
def api(server, command, *args, **kwargs):
'''
Call the Spacewalk xmlrpc api.
CLI Example:
.. code-block:: bash
salt-run spacewalk.api spacewalk01.domain.com systemgroup.create MyGroup Description
salt-run spacewalk.api spacewalk01.domain.com systemgroup.create arguments='["MyGroup", ... | Call the Spacewalk xmlrpc api.
CLI Example:
.. code-block:: bash
salt-run spacewalk.api spacewalk01.domain.com systemgroup.create MyGroup Description
salt-run spacewalk.api spacewalk01.domain.com systemgroup.create arguments='["MyGroup", "Description"]'
State Example:
.. code-block:... |
def get_issuer(request):
"""
Gets the Issuer of the Logout Request Message
:param request: Logout Request Message
:type request: string|DOMDocument
:return: The Issuer
:rtype: string
"""
if isinstance(request, etree._Element):
elem = request
... | Gets the Issuer of the Logout Request Message
:param request: Logout Request Message
:type request: string|DOMDocument
:return: The Issuer
:rtype: string |
def calibrate(filename):
"""
Append the calibration parameters as variables of the netcdf file.
Keyword arguments:
filename -- the name of a netcdf file.
"""
params = calibration_to(filename)
with nc.loader(filename) as root:
for key, value in params.items():
nc.getdim(r... | Append the calibration parameters as variables of the netcdf file.
Keyword arguments:
filename -- the name of a netcdf file. |
def add_nodes_from(self, nodes, weights=None):
"""
Add multiple nodes to the Graph.
**The behviour of adding weights is different than in networkx.
Parameters
----------
nodes: iterable container
A container of nodes (list, dict, set, or any hashable python
... | Add multiple nodes to the Graph.
**The behviour of adding weights is different than in networkx.
Parameters
----------
nodes: iterable container
A container of nodes (list, dict, set, or any hashable python
object).
weights: list, tuple (default=None)
... |
def correlate(h1, h2): # 31 us @array, 55 us @list \w 100 bins
r"""
Correlation between two histograms.
The histogram correlation between two histograms :math:`H` and :math:`H'` of size :math:`m`
is defined as:
.. math::
d_{corr}(H, H') =
\frac{
\sum_{m=1}... | r"""
Correlation between two histograms.
The histogram correlation between two histograms :math:`H` and :math:`H'` of size :math:`m`
is defined as:
.. math::
d_{corr}(H, H') =
\frac{
\sum_{m=1}^M (H_m-\bar{H}) \cdot (H'_m-\bar{H'})
}{
\sqrt... |
def update_execution_state_kernel(self):
"""Update actions following the execution state of the kernel."""
client = self.get_current_client()
if client is not None:
executing = client.stop_button.isEnabled()
self.interrupt_action.setEnabled(executing) | Update actions following the execution state of the kernel. |
def update(self, dict_name, mapping=None, priorities=None, expire=None,
locks=None):
'''Add mapping to a dictionary, replacing previous values
Can be called with only dict_name and expire to refresh the
expiration time.
NB: locks are only enforced if present, so nothing ... | Add mapping to a dictionary, replacing previous values
Can be called with only dict_name and expire to refresh the
expiration time.
NB: locks are only enforced if present, so nothing prevents
another caller from coming in an modifying data without using
locks.
:param m... |
def get_distribute_verbatim_metadata(self):
"""Gets the metadata for the distribute verbatim rights flag.
return: (osid.Metadata) - metadata for the distribution rights
fields
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from tem... | Gets the metadata for the distribute verbatim rights flag.
return: (osid.Metadata) - metadata for the distribution rights
fields
*compliance: mandatory -- This method must be implemented.* |
def add(self, entry):
"""
Override this to influence the mechanics of the Archive.
Assumes entry is a seq beginning with (nm, pth, ...) where
nm is the key by which we'll be asked for the object.
pth is the name of where we find the object. Overrides of
get_obj_from can m... | Override this to influence the mechanics of the Archive.
Assumes entry is a seq beginning with (nm, pth, ...) where
nm is the key by which we'll be asked for the object.
pth is the name of where we find the object. Overrides of
get_obj_from can make use of further elements in entry. |
def create(self, handle, title=None, description=None):
""" Create a role """
role = Role(handle=handle, title=title, description=description)
schema = RoleSchema()
valid = schema.process(role)
if not valid:
return valid
db.session.add(role)
db.sessio... | Create a role |
def getJobStatus(self, workers):
"""
Parameters:
----------------------------------------------------------------------
workers: If this job was launched outside of the nupic job engine, then this
is an array of subprocess Popen instances, one for each worker
retval: _NupicJo... | Parameters:
----------------------------------------------------------------------
workers: If this job was launched outside of the nupic job engine, then this
is an array of subprocess Popen instances, one for each worker
retval: _NupicJob.JobStatus instance |
def _quote_username(name):
    '''
    Usernames can only contain ascii chars, so make sure we return a str type
    '''
    if isinstance(name, six.string_types):
        return salt.utils.stringutils.to_str(name)
    # Non-string input (e.g. a numeric uid): coerce to str directly.
    return str(name)  # future lint: disable=blacklisted-function
def storage_type(self):
"""Depending on input data type, the storage type is either
"field" (complex) or "phase" (real)."""
nf = np.load(str(self.path), mmap_mode="c", allow_pickle=False)
if np.iscomplexobj(nf):
st = "field"
else:
st = "phase"
retu... | Depending on input data type, the storage type is either
"field" (complex) or "phase" (real). |
def cancel_job(
self,
project_id,
region,
job_id,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Starts a job cancellation request. To access the job resource after
... | Starts a job cancellation request. To access the job resource after
cancellation, call
`regions/{region}/jobs.list <https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list>`__
or
`regions/{region}/jobs.get <https://cloud.google.com/dataproc/docs/reference... |
def volume(self):
    """
    The analytic volume of the cylinder primitive.

    Computed as pi * r^2 * h from the primitive's radius and height.

    Returns
    ---------
    volume : float
      Volume of the cylinder
    """
    radius = self.primitive.radius
    height = self.primitive.height
    return np.pi * radius ** 2 * height
Returns
---------
volume : float
Volume of the cylinder |
def cli(ctx, board, fpga, pack, type, size, project_dir,
verbose, verbose_yosys, verbose_arachne):
"""Bitstream timing analysis."""
# Run scons
exit_code = SCons(project_dir).time({
'board': board,
'fpga': fpga,
'size': size,
'type': type,
'pack': pack,
... | Bitstream timing analysis. |
def autoLayout(self):
"""
Automatically lays out the contents for this widget.
"""
try:
direction = self.currentSlide().scene().direction()
except AttributeError:
direction = QtGui.QBoxLayout.TopToBottom
size = self.size()
... | Automatically lays out the contents for this widget. |
def fileopenbox(msg=None
, title=None
, default="*"
, filetypes=None
):
"""
A dialog to get a file name.
About the "default" argument
============================
The "default" argument specifies a filepath that (normally)
contains one or more wildcards.
fileopen... | A dialog to get a file name.
About the "default" argument
============================
The "default" argument specifies a filepath that (normally)
contains one or more wildcards.
fileopenbox will display only files that match the default filepath.
If omitted, defaults to "*" (al... |
def _parse_accented_syllable(unparsed_syllable):
"""Return the syllable and tone of an accented Pinyin syllable.
Any accented vowels are returned without their accents.
Implements the following algorithm:
1. If the syllable has an accent mark, convert that vowel to a
regular vowel and add the... | Return the syllable and tone of an accented Pinyin syllable.
Any accented vowels are returned without their accents.
Implements the following algorithm:
1. If the syllable has an accent mark, convert that vowel to a
regular vowel and add the tone to the end of the syllable.
2. Otherwise, assu... |
def get_last_rconfiguration_id(topic_id, remoteci_id, db_conn=None):
"""Get the rconfiguration_id of the last job run by the remoteci.
:param topic_id: the topic
:param remoteci_id: the remoteci id
:return: last rconfiguration_id of the remoteci
"""
db_conn = db_conn or flask.g.db_conn
__TA... | Get the rconfiguration_id of the last job run by the remoteci.
:param topic_id: the topic
:param remoteci_id: the remoteci id
:return: last rconfiguration_id of the remoteci |
def string_format(data, out='nested', opts=None, **kwargs):
'''
Return the outputter formatted string, removing the ANSI escape sequences.
data
The JSON serializable object.
out: ``nested``
The name of the output to use to transform the data. Default: ``nested``.
opts
Dict... | Return the outputter formatted string, removing the ANSI escape sequences.
data
The JSON serializable object.
out: ``nested``
The name of the output to use to transform the data. Default: ``nested``.
opts
Dictionary of configuration options. Default: ``__opts__``.
kwargs
... |
def _get_broadcast_shape(shape1, shape2):
"""Given two shapes that are not identical, find the shape
that both input shapes can broadcast to."""
if shape1 == shape2:
return shape1
length1 = len(shape1)
length2 = len(shape2)
if length1 > length2:
shape = list(shape1)
else:
... | Given two shapes that are not identical, find the shape
that both input shapes can broadcast to. |
def small_integer(anon, obj, field, val):
    """
    Returns a random small integer (for a Django SmallIntegerField)
    """
    # obj and val are part of the anonymizer callback signature but
    # unused here; generation is delegated entirely to the faker.
    faker = anon.faker
    return faker.small_integer(field=field)
def set_connection_params(self, ip_address, tsap_snap7, tsap_logo):
"""
Sets internally (IP, LocalTSAP, RemoteTSAP) Coordinates.
This function must be called just before Cli_Connect().
:param ip_address: IP ip_address of server
:param tsap_snap7: TSAP SNAP7 Client (e.g. 10.00 = ... | Sets internally (IP, LocalTSAP, RemoteTSAP) Coordinates.
This function must be called just before Cli_Connect().
:param ip_address: IP ip_address of server
:param tsap_snap7: TSAP SNAP7 Client (e.g. 10.00 = 0x1000)
:param tsap_logo: TSAP Logo Server (e.g. 20.00 = 0x2000) |
def compose_path(pub, uuid_url=False):
"""
Compose absolute path for given `pub`.
Args:
pub (obj): :class:`.DBPublication` instance.
uuid_url (bool, default False): Compose URL using UUID.
Returns:
str: Absolute url-path of the publication, without server's address \
... | Compose absolute path for given `pub`.
Args:
pub (obj): :class:`.DBPublication` instance.
uuid_url (bool, default False): Compose URL using UUID.
Returns:
str: Absolute url-path of the publication, without server's address \
and protocol.
Raises:
PrivatePublic... |
def all(cls, include_deactivated=False):
"""
Get all resources
:param include_deactivated: Include deactivated resources in response
:returns: list of Document instances
:raises: SocketError, CouchException
"""
if include_deactivated:
resources = yiel... | Get all resources
:param include_deactivated: Include deactivated resources in response
:returns: list of Document instances
:raises: SocketError, CouchException |
def isometric_build_atlased_mesh(script, BorderSize=0.1):
"""Isometric parameterization: Build Atlased Mesh
This actually generates the UV mapping from the isometric parameterization
"""
filter_xml = ''.join([
' <filter name="Iso Parametrization Build Atlased Mesh">\n',
' <Param na... | Isometric parameterization: Build Atlased Mesh
This actually generates the UV mapping from the isometric parameterization |
def postorder(self, node=None):
"""Walk the tree in roughly 'postorder' (a bit of a lie
explained below).
For each node with typestring name *name* if the
node has a method called n_*name*, call that before walking
children. If there is no method define, call a
self.defa... | Walk the tree in roughly 'postorder' (a bit of a lie
explained below).
For each node with typestring name *name* if the
node has a method called n_*name*, call that before walking
children. If there is no method define, call a
self.default(node) instead. Subclasses of GenericAST... |
def get_event_log(self, object_id):
    """Get the specified event log."""
    # Fetch the raw log payload and wrap it in the model type.
    path = "/event_log/%s" % object_id
    response = self._fetch(path, method="GET")
    return FastlyEventLog(self, response)
def get_action_group_names(self):
"""Return all the security group names configured in this action."""
return self.get_group_names(
list(itertools.chain(
*[self._get_array('add'),
self._get_array('remove'),
self._get_array('isolation-group'... | Return all the security group names configured in this action. |
def qImageToArray(qimage, dtype = 'array'):
"""Convert QImage to numpy.ndarray. The dtype defaults to uint8
for QImage.Format_Indexed8 or `bgra_dtype` (i.e. a record array)
for 32bit color images. You can pass a different dtype to use, or
'array' to get a 3D uint8 array for color images."""
... | Convert QImage to numpy.ndarray. The dtype defaults to uint8
for QImage.Format_Indexed8 or `bgra_dtype` (i.e. a record array)
for 32bit color images. You can pass a different dtype to use, or
'array' to get a 3D uint8 array for color images. |
def _inject_target(self, target_adaptor):
"""Inject a target, respecting all sources of dependencies."""
target_cls = self._target_types[target_adaptor.type_alias]
declared_deps = target_adaptor.dependencies
implicit_deps = (Address.parse(s,
relative_to=target_adaptor... | Inject a target, respecting all sources of dependencies. |
def trace2(A, B):
r"""Trace of :math:`\mathrm A \mathrm B^\intercal`.
Args:
A (array_like): Left-hand side.
B (array_like): Right-hand side.
Returns:
float: Trace of :math:`\mathrm A \mathrm B^\intercal`.
"""
A = asarray(A, float)
B = asarray(B, float)
layout_error... | r"""Trace of :math:`\mathrm A \mathrm B^\intercal`.
Args:
A (array_like): Left-hand side.
B (array_like): Right-hand side.
Returns:
float: Trace of :math:`\mathrm A \mathrm B^\intercal`. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.