code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def treeAggregate(self, zeroValue, seqOp, combOp, depth=2):
"""
Aggregates the elements of this RDD in a multi-level tree
pattern.
:param depth: suggested depth of the tree (default: 2)
>>> add = lambda x, y: x + y
>>> rdd = sc.parallelize([-5, -4, -3, -2, -1, 1, 2, 3, ... | Aggregates the elements of this RDD in a multi-level tree
pattern.
:param depth: suggested depth of the tree (default: 2)
>>> add = lambda x, y: x + y
>>> rdd = sc.parallelize([-5, -4, -3, -2, -1, 1, 2, 3, 4], 10)
>>> rdd.treeAggregate(0, add, add)
-5
>>> rdd.tr... |
def spkopa(filename):
"""
Open an existing SPK file for subsequent write.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkopa_c.html
:param filename: The name of an existing SPK file.
:type filename: str
:return: A handle attached to the SPK file opened to append.
:rtype: int
... | Open an existing SPK file for subsequent write.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkopa_c.html
:param filename: The name of an existing SPK file.
:type filename: str
:return: A handle attached to the SPK file opened to append.
:rtype: int |
def __execute_scale(self, surface, size_to_scale_from):
"""Execute the scaling operation"""
x = size_to_scale_from[0] * self.__scale[0]
y = size_to_scale_from[1] * self.__scale[1]
scaled_value = (int(x), int(y))
## #Find out what scaling technique we should use.
## if self... | Execute the scaling operation |
def run_tasks(cls):
"""Internal task-runner class method, called by :py:func:`sisy.consumers.run_heartbeat`"""
now = timezone.now()
tasks = cls.objects.filter(enabled=True)
for task in tasks:
if task.next_run == HAS_NOT_RUN:
task.calc_next_run()
if... | Internal task-runner class method, called by :py:func:`sisy.consumers.run_heartbeat` |
def transform_streams_for_comparison(outputs):
"""Makes failure output for streams better by having key be the stream name"""
new_outputs = []
for output in outputs:
if (output.output_type == 'stream'):
# Transform output
new_outputs.append({
'output_type': 's... | Makes failure output for streams better by having key be the stream name |
def return_secondary_learner(self):
"""Returns secondary learner using its origin and the given hyperparameters
Returns:
est (estimator): Estimator object
"""
estimator = self.base_learner_origin.return_estimator()
estimator = estimator.set_params(**self.secondary_le... | Returns secondary learner using its origin and the given hyperparameters
Returns:
est (estimator): Estimator object |
def ltrimboth (l,proportiontocut):
"""
Slices off the passed proportion of items from BOTH ends of the passed
list (i.e., with proportiontocut=0.1, slices 'leftmost' 10% AND 'rightmost'
10% of scores. Assumes list is sorted by magnitude. Slices off LESS if
proportion results in a non-integer slice index (i.e., co... | Slices off the passed proportion of items from BOTH ends of the passed
list (i.e., with proportiontocut=0.1, slices 'leftmost' 10% AND 'rightmost'
10% of scores. Assumes list is sorted by magnitude. Slices off LESS if
proportion results in a non-integer slice index (i.e., conservatively
slices off proportiontocut).
... |
def get_host_cache(service_instance=None):
'''
Returns the host cache configuration on the proxy host.
service_instance
Service instance (vim.ServiceInstance) of the vCenter/ESXi host.
Default is None.
.. code-block:: bash
salt '*' vsphere.get_host_cache
'''
# Default ... | Returns the host cache configuration on the proxy host.
service_instance
Service instance (vim.ServiceInstance) of the vCenter/ESXi host.
Default is None.
.. code-block:: bash
salt '*' vsphere.get_host_cache |
def get_html_text_editor(
name,
id=None,
content='',
textual_content=None,
width='300px',
height='200px',
enabled=True,
file_upload_url=None,
toolbar_set="Basic",
custom_configurations_path='/js/ckeditor/invenio-ckeditor-config.js',
... | Returns a wysiwyg editor (CKEditor) to embed in html pages.
Fall back to a simple textarea when the library is not installed,
or when the user's browser is not compatible with the editor, or
when 'enable' is False, or when javascript is not enabled.
NOTE that the output also contains a hidden field na... |
def switch_toggle(context, ain):
"""Toggle an actor's power state"""
context.obj.login()
actor = context.obj.get_actor_by_ain(ain)
if actor:
if actor.get_state():
actor.switch_off()
click.echo("State for {} is now OFF".format(ain))
else:
actor.switch_o... | Toggle an actor's power state |
def _remove_event_source(awsclient, evt_source, lambda_arn):
"""
Given an event_source dictionary, create the object and remove the event source.
"""
event_source_obj = _get_event_source_obj(awsclient, evt_source)
if event_source_obj.exists(lambda_arn):
event_source_obj.remove(lambda_arn) | Given an event_source dictionary, create the object and remove the event source. |
def systemd(
state, host, name,
running=True, restarted=False, reloaded=False,
command=None, enabled=None, daemon_reload=False,
):
'''
Manage the state of systemd managed services.
+ name: name of the service to manage
+ running: whether the service should be running
+ restarted: whethe... | Manage the state of systemd managed services.
+ name: name of the service to manage
+ running: whether the service should be running
+ restarted: whether the service should be restarted
+ reloaded: whether the service should be reloaded
+ command: custom command to pass like: ``/etc/rc.d/<name> <co... |
def validateDocumentFinal(self, ctxt):
"""Does the final step for the document validation once all
the incremental validation steps have been completed
basically it does the following checks described by the XML
Rec Check all the IDREF/IDREFS attributes definition for
v... | Does the final step for the document validation once all
the incremental validation steps have been completed
basically it does the following checks described by the XML
Rec Check all the IDREF/IDREFS attributes definition for
validity |
def get_serializer(self, instance=None, data=None,
many=False, partial=False):
"""
Return the serializer instance that should be used for validating and
deserializing input, and for serializing output.
"""
serializers = {
'node': NodeRequestList... | Return the serializer instance that should be used for validating and
deserializing input, and for serializing output. |
def raise_(type_, value=None, traceback=None): # pylint: disable=W0613
"""
Does the same as ordinary ``raise`` with arguments do in Python 2.
But works in Python 3 (>= 3.3) also!
Please checkout README on https://github.com/9seconds/pep3134
to get an idea about possible pitfals. But short story is... | Does the same as ordinary ``raise`` with arguments do in Python 2.
But works in Python 3 (>= 3.3) also!
Please checkout README on https://github.com/9seconds/pep3134
to get an idea about possible pitfals. But short story is: please
be pretty carefull with tracebacks. If it is possible, use sys.exc_info... |
def FDMT(data, f_min, f_max, maxDT, dataType):
"""
This function implements the FDMT algorithm.
Input: Input visibility array (nints, nbl, nchan, npol)
f_min,f_max are the base-band begin and end frequencies.
The frequencies should be entered in MHz
maxDT - the max... | This function implements the FDMT algorithm.
Input: Input visibility array (nints, nbl, nchan, npol)
f_min,f_max are the base-band begin and end frequencies.
The frequencies should be entered in MHz
maxDT - the maximal delay (in time bins) of the maximal dispersion.
... |
def split_no_wd_params(layer_groups:Collection[nn.Module])->List[List[nn.Parameter]]:
"Separate the parameters in `layer_groups` between `no_wd_types` and bias (`bias_types`) from the rest."
split_params = []
for l in layer_groups:
l1,l2 = [],[]
for c in l.children():
if isinsta... | Separate the parameters in `layer_groups` between `no_wd_types` and bias (`bias_types`) from the rest. |
async def ensure_process(self):
"""
Start the process
"""
# We don't want multiple requests trying to start the process at the same time
# FIXME: Make sure this times out properly?
# Invariant here should be: when lock isn't being held, either 'proc' is in state &
... | Start the process |
def load_auth_from_file(filename):
"""Initializes the auth settings for accessing MyAnimelist through its
official API from a given filename.
:param filename The name of the file containing your MyAnimeList
credentials
REQUIREMENTS: The file must...
... | Initializes the auth settings for accessing MyAnimelist through its
official API from a given filename.
:param filename The name of the file containing your MyAnimeList
credentials
REQUIREMENTS: The file must...
...username for your MAL account.
... |
def setFixedHeight(self, height):
"""
Sets the fixed height for this item to the inputed height amount.
:param height | <int>
"""
super(XViewPanelItem, self).setFixedHeight(height)
self._dragLabel.setFixedHeight(height)
self._titleLabel.setFixedHeight(heigh... | Sets the fixed height for this item to the inputed height amount.
:param height | <int> |
def rpc_call(self, request, method=None, params=None, **kwargs):
""" Call a RPC method.
return object: a result
"""
args = []
kwargs = dict()
if isinstance(params, dict):
kwargs.update(params)
else:
args = list(as_tuple(params))
... | Call a RPC method.
return object: a result |
def play(self, call_params):
"""REST Play something on a Call Helper
"""
path = '/' + self.api_version + '/Play/'
method = 'POST'
return self.request(path, method, call_params) | REST Play something on a Call Helper |
def sub_article_folders(self):
"""
Returns all valid ArticleFolder sitting inside of
:attr:`ArticleFolder.dir_path`.
"""
l = list()
for p in Path.sort_by_fname(
Path(self.dir_path).select_dir(recursive=False)
):
af = ArticleFolder(dir_p... | Returns all valid ArticleFolder sitting inside of
:attr:`ArticleFolder.dir_path`. |
def importance(self, attribute, examples):
"""
AIMA implies that importance should be information gain.
Since AIMA only defines it for binary features this implementation
was based on the wikipedia article:
http://en.wikipedia.org/wiki/Information_gain_in_decision_trees
"... | AIMA implies that importance should be information gain.
Since AIMA only defines it for binary features this implementation
was based on the wikipedia article:
http://en.wikipedia.org/wiki/Information_gain_in_decision_trees |
def DEFAULT_RENAMER(L, Names=None):
"""
Renames overlapping column names of numpy ndarrays with structured dtypes
Rename the columns by using a simple convention:
* If `L` is a list, it will append the number in the list to the key
associated with the array.
* If `L` is a dictionary,... | Renames overlapping column names of numpy ndarrays with structured dtypes
Rename the columns by using a simple convention:
* If `L` is a list, it will append the number in the list to the key
associated with the array.
* If `L` is a dictionary, the algorithm will append the string
r... |
def request_issuance(self, csr):
"""
Request a certificate.
Authorizations should have already been completed for all of the names
requested in the CSR.
Note that unlike `acme.client.Client.request_issuance`, the certificate
resource will have the body data as raw bytes... | Request a certificate.
Authorizations should have already been completed for all of the names
requested in the CSR.
Note that unlike `acme.client.Client.request_issuance`, the certificate
resource will have the body data as raw bytes.
.. seealso:: `txacme.util.csr_for_names`
... |
def build(self, tag, **kwargs):
"""
Identical to :meth:`dockermap.client.base.DockerClientWrapper.build` with additional logging.
"""
self.push_log("Building image '{0}'.".format(tag))
set_raise_on_error(kwargs)
try:
return super(DockerFabricClient, self).buil... | Identical to :meth:`dockermap.client.base.DockerClientWrapper.build` with additional logging. |
def print_math(math_expression_lst, name = "math.html", out='html', formatter = lambda x: x):
"""
Converts LaTeX math expressions into an html layout.
Creates a html file in the directory where print_math is called
by default. Displays math to jupyter notebook if "notebook" argument
is specified.
... | Converts LaTeX math expressions into an html layout.
Creates a html file in the directory where print_math is called
by default. Displays math to jupyter notebook if "notebook" argument
is specified.
Args:
math_expression_lst (list): A list of LaTeX math (string) to be rendered by KaTeX
... |
def generateXY(self, **kwargs):
""" Generate source catalog from input image using DAOFIND-style algorithm
"""
#x,y,flux,sharp,round = idlphot.find(array,self.pars['hmin'],self.pars['fwhm'],
# roundlim=self.pars['roundlim'], sharplim=self.pars['sharplim'])
prin... | Generate source catalog from input image using DAOFIND-style algorithm |
def to_line_string(self, closed=True):
"""
Convert this polygon's `exterior` to a ``LineString`` instance.
Parameters
----------
closed : bool, optional
Whether to close the line string, i.e. to add the first point of
the `exterior` also as the last point... | Convert this polygon's `exterior` to a ``LineString`` instance.
Parameters
----------
closed : bool, optional
Whether to close the line string, i.e. to add the first point of
the `exterior` also as the last point at the end of the line string.
This has no eff... |
def open(safe_file):
"""Return a SentinelDataSet object."""
if os.path.isdir(safe_file) or os.path.isfile(safe_file):
return SentinelDataSet(safe_file)
else:
raise IOError("file not found: %s" % safe_file) | Return a SentinelDataSet object. |
def quaternion_from_euler(angles, order='yzy'):
"""Generate a quaternion from a set of Euler angles.
Args:
angles (array_like): Array of Euler angles.
order (str): Order of Euler rotations. 'yzy' is default.
Returns:
Quaternion: Quaternion representation of Euler rotation.
"""... | Generate a quaternion from a set of Euler angles.
Args:
angles (array_like): Array of Euler angles.
order (str): Order of Euler rotations. 'yzy' is default.
Returns:
Quaternion: Quaternion representation of Euler rotation. |
def _extract_from_url(self, url):
"""Try to extract from the article URL - simple but might work as a fallback"""
# Regex by Newspaper3k - https://github.com/codelucas/newspaper/blob/master/newspaper/urls.py
m = re.search(re_pub_date, url)
if m:
return self.parse_date_str(m... | Try to extract from the article URL - simple but might work as a fallback |
def checkout(self, ref, cb=None):
"""Checkout a bundle from the remote. Returns a file-like object"""
if self.is_api:
return self._checkout_api(ref, cb=cb)
else:
return self._checkout_fs(ref, cb=cb) | Checkout a bundle from the remote. Returns a file-like object |
def fromrandom(shape=(10, 50, 50), npartitions=1, seed=42, engine=None):
"""
Generate random image data.
Parameters
----------
shape : tuple, optional, default=(10, 50, 50)
Dimensions of images.
npartitions : int, optional, default=1
Number of partitions.
seed : int, optio... | Generate random image data.
Parameters
----------
shape : tuple, optional, default=(10, 50, 50)
Dimensions of images.
npartitions : int, optional, default=1
Number of partitions.
seed : int, optional, default=42
Random seed. |
def draw(self, scr):
'Draw entire screen onto the `scr` curses object.'
numHeaderRows = 1
scr.erase() # clear screen before every re-draw
vd().refresh()
if not self.columns:
return
color_current_row = CursesAttr(colors.color_current_row, 5)
disp_co... | Draw entire screen onto the `scr` curses object. |
def hashify_files(files: list) -> dict:
"""Return mapping from file path to file hash."""
return {filepath.replace('\\', '/'): hash_tree(filepath)
for filepath in listify(files)} | Return mapping from file path to file hash. |
def all_referenced_targets(self, result):
"""Returns all targets referenced by this subvariant,
either directly or indirectly, and either as sources,
or as dependency properties. Targets referred with
dependency property are returned a properties, not targets."""
if __debug__:
... | Returns all targets referenced by this subvariant,
either directly or indirectly, and either as sources,
or as dependency properties. Targets referred with
dependency property are returned a properties, not targets. |
def makevAndvPfuncs(self,policyFunc):
'''
Constructs the marginal value function for this period.
Parameters
----------
policyFunc : function
Consumption and medical care function for this period, defined over
market resources, permanent income level, and... | Constructs the marginal value function for this period.
Parameters
----------
policyFunc : function
Consumption and medical care function for this period, defined over
market resources, permanent income level, and the medical need shock.
Returns
-------
... |
def statisticalInefficiency(A_n, B_n=None, fast=False, mintime=3, fft=False):
"""Compute the (cross) statistical inefficiency of (two) timeseries.
Parameters
----------
A_n : np.ndarray, float
A_n[n] is nth value of timeseries A. Length is deduced from vector.
B_n : np.ndarray, float, opti... | Compute the (cross) statistical inefficiency of (two) timeseries.
Parameters
----------
A_n : np.ndarray, float
A_n[n] is nth value of timeseries A. Length is deduced from vector.
B_n : np.ndarray, float, optional, default=None
B_n[n] is nth value of timeseries B. Length is deduced fr... |
def _get_ctypes(self):
"""
Returns all related objects for this model.
"""
ctypes = []
for related_object in self.model._meta.get_all_related_objects():
model = getattr(related_object, 'related_model', related_object.model)
ctypes.append(ContentType.object... | Returns all related objects for this model. |
def VarintReader(buf, pos=0):
"""A 64 bit decoder from google.protobuf.internal.decoder."""
result = 0
shift = 0
while 1:
b = buf[pos]
result |= (ORD_MAP_AND_0X7F[b] << shift)
pos += 1
if not ORD_MAP_AND_0X80[b]:
return (result, pos)
shift += 7
if shift >= 64:
raise rdfvalue... | A 64 bit decoder from google.protobuf.internal.decoder. |
def read(self, fp):
"Reads a dictionary from an input stream."
base_size = struct.unpack(str("=I"), fp.read(4))[0]
self._units.fromfile(fp, base_size) | Reads a dictionary from an input stream. |
def pancake_sort(arr):
"""
Pancake_sort
Sorting a given array
mutation of selection sort
reference: https://www.geeksforgeeks.org/pancake-sorting/
Overall time complexity : O(N^2)
"""
len_arr = len(arr)
if len_arr <= 1:
return arr
for cur in range(len(arr), 1, -1):... | Pancake_sort
Sorting a given array
mutation of selection sort
reference: https://www.geeksforgeeks.org/pancake-sorting/
Overall time complexity : O(N^2) |
def iterate_pubmed_identifiers(graph) -> Iterable[str]:
"""Iterate over all PubMed identifiers in a graph.
:param pybel.BELGraph graph: A BEL graph
:return: An iterator over the PubMed identifiers in the graph
"""
return (
data[CITATION][CITATION_REFERENCE].strip()
for _, _, data in... | Iterate over all PubMed identifiers in a graph.
:param pybel.BELGraph graph: A BEL graph
:return: An iterator over the PubMed identifiers in the graph |
def runcode(code):
"""Run the given code line by line with printing, as list of lines, and return variable 'ans'."""
for line in code:
print('# '+line)
exec(line,globals())
print('# return ans')
return ans | Run the given code line by line with printing, as list of lines, and return variable 'ans'. |
def generic_pst(par_names=["par1"],obs_names=["obs1"],addreg=False):
"""generate a generic pst instance. This can used to later fill in
the Pst parts programatically.
Parameters
----------
par_names : (list)
parameter names to setup
obs_names : (list)
observation names to setup... | generate a generic pst instance. This can used to later fill in
the Pst parts programatically.
Parameters
----------
par_names : (list)
parameter names to setup
obs_names : (list)
observation names to setup
Returns
-------
new_pst : pyemu.Pst |
def main(argv=None):
"""Main command line interface."""
if argv is None:
argv = sys.argv[1:]
cli = CommandLineTool()
return cli.run(argv) | Main command line interface. |
def get_record(self, path=None, no_pdf=False,
test=False, refextract_callback=None):
"""Convert a record to MARCXML format.
:param path: path to a record.
:type path: string
:param test: flag to determine if it is a test call.
:type test: bool
:param r... | Convert a record to MARCXML format.
:param path: path to a record.
:type path: string
:param test: flag to determine if it is a test call.
:type test: bool
:param refextract_callback: callback to be used to extract
unstructured references. It ... |
def separate(self):
'''Return contiguous parts of collection as separate collections.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
collections = []
start = None
end = None
for index in self.indexes:
if start is None:
... | Return contiguous parts of collection as separate collections.
Return as list of :py:class:`~clique.collection.Collection` instances. |
def cover(ctx, html=False):
'''Run tests suite with coverage'''
params = '--cov-report term --cov-report html' if html else ''
with ctx.cd(ROOT):
ctx.run('pytest --cov flask_fs {0}'.format(params), pty=True) | Run tests suite with coverage |
def get_fernet():
"""
Deferred load of Fernet key.
This function could fail either because Cryptography is not installed
or because the Fernet key is invalid.
:return: Fernet object
:raises: airflow.exceptions.AirflowException if there's a problem trying to load Fernet
"""
global _fern... | Deferred load of Fernet key.
This function could fail either because Cryptography is not installed
or because the Fernet key is invalid.
:return: Fernet object
:raises: airflow.exceptions.AirflowException if there's a problem trying to load Fernet |
def draw_linecollection(data, obj):
"""Returns Pgfplots code for a number of patch objects.
"""
content = []
edgecolors = obj.get_edgecolors()
linestyles = obj.get_linestyles()
linewidths = obj.get_linewidths()
paths = obj.get_paths()
for i, path in enumerate(paths):
color = ed... | Returns Pgfplots code for a number of patch objects. |
async def start(self):
"""Start serving access to devices over bluetooth."""
self._command_task.start()
try:
await self._cleanup_old_connections()
except Exception:
await self.stop()
raise
#FIXME: This is a temporary hack, get the actual dev... | Start serving access to devices over bluetooth. |
def satisfies(self, other): # type: (Term) -> bool
"""
Returns whether this term satisfies another.
"""
return (
self.dependency.name == other.dependency.name
and self.relation(other) == SetRelation.SUBSET
) | Returns whether this term satisfies another. |
def configure_mongodb(self):
""" Configure MongoDB """
self._display_info("Trying default configuration")
host = "localhost"
database_name = "INGInious"
should_ask = True
if self.try_mongodb_opts(host, database_name):
should_ask = self._ask_boolean(
... | Configure MongoDB |
def dist(self):
"""Return the `Distribution` selected for Zinc based on execution strategy.
:rtype: pants.java.distribution.distribution.Distribution
"""
underlying_dist = self.underlying_dist
if self._execution_strategy != NailgunTaskBase.HERMETIC:
# symlink .pants.d/.jdk -> /some/java/home/... | Return the `Distribution` selected for Zinc based on execution strategy.
:rtype: pants.java.distribution.distribution.Distribution |
def imgmin(self):
"""
Lowest value of input image.
"""
if not hasattr(self, '_imgmin'):
imgmin = _np.min(self.images[0])
for img in self.images:
imin = _np.min(img)
if imin > imgmin:
imgmin = imin
se... | Lowest value of input image. |
def get_path(self):
"""Gets the path to the focused statistics. Each step is a hash of
statistics object.
"""
path = deque()
__, node = self.get_focus()
while not node.is_root():
stats = node.get_value()
path.appendleft(hash(stats))
nod... | Gets the path to the focused statistics. Each step is a hash of
statistics object. |
def SetupDisplayDevice(self, type, state, percentage, energy, energy_full,
energy_rate, time_to_empty, time_to_full, is_present,
icon_name, warning_level):
'''Convenience method to configure DisplayDevice properties
This calls Set() for all properties that the Disp... | Convenience method to configure DisplayDevice properties
This calls Set() for all properties that the DisplayDevice is defined to
have, and is shorter if you have to completely set it up instead of
changing just one or two properties.
This is only available when mocking the 1.0 API. |
def save_user(self, idvalue, options=None):
"""
save user by a given id
http://getstarted.sailthru.com/api/user
"""
options = options or {}
data = options.copy()
data['id'] = idvalue
return self.api_post('user', data) | save user by a given id
http://getstarted.sailthru.com/api/user |
def configure_threecolor_image(self):
"""
configures the three color image according to the requested parameters
:return: nothing, just updates self.image
"""
order = {'red': 0, 'green': 1, 'blue': 2}
self.image = np.zeros((self.shape[0], self.shape[1], 3))
for co... | configures the three color image according to the requested parameters
:return: nothing, just updates self.image |
def add(self, data, conn_type, squash=True):
"""
Combine this tree and the data represented by data using the
connector conn_type. The combine is done by squashing the node other
away if possible.
This tree (self) will never be pushed to a child node of the
combined tree... | Combine this tree and the data represented by data using the
connector conn_type. The combine is done by squashing the node other
away if possible.
This tree (self) will never be pushed to a child node of the
combined tree, nor will the connector or negated properties change.
R... |
def get_all_dhcp_options(self, dhcp_options_ids=None):
"""
Retrieve information about your DhcpOptions.
:type dhcp_options_ids: list
:param dhcp_options_ids: A list of strings with the desired DhcpOption ID's
:rtype: list
:return: A list of :class:`boto.vpc.dhcpoptions.... | Retrieve information about your DhcpOptions.
:type dhcp_options_ids: list
:param dhcp_options_ids: A list of strings with the desired DhcpOption ID's
:rtype: list
:return: A list of :class:`boto.vpc.dhcpoptions.DhcpOptions` |
def adopt(self, grab):
"""
Copy the state of another `Grab` instance.
Use case: create backup of current state to the cloned instance and
then restore the state from it.
"""
self.load_config(grab.config)
self.doc = grab.doc.copy(new_grab=self)
for key ... | Copy the state of another `Grab` instance.
Use case: create backup of current state to the cloned instance and
then restore the state from it. |
def dropdb(self, name):
'''
Deletes an **entire database** (i.e. a table), losing all data.
'''
if self.readonly:
raise s_exc.IsReadOnly()
while True:
try:
if not self.dbexists(name):
return
db = self.in... | Deletes an **entire database** (i.e. a table), losing all data. |
def assign(self, node):
"""
Translate an assign node into SQLQuery.
:param node: a treebrd node
:return: a SQLQuery object for the tree rooted at node
"""
child_object = self.translate(node.child)
child_object.prefix = 'CREATE TEMPORARY TABLE {name}({attributes}) ... | Translate an assign node into SQLQuery.
:param node: a treebrd node
:return: a SQLQuery object for the tree rooted at node |
def refresh(self):
""" Updates this drive with data from the server
:return: Success / Failure
:rtype: bool
"""
if self.object_id is None:
url = self.build_url(self._endpoints.get('default_drive'))
else:
url = self.build_url(
self... | Updates this drive with data from the server
:return: Success / Failure
:rtype: bool |
def do_EOF(self, args):
"""Exit on system end of file character"""
if _debug: ConsoleCmd._debug("do_EOF %r", args)
return self.do_exit(args) | Exit on system end of file character |
def is_same_as(self, other_databox, headers=True, columns=True, header_order=True, column_order=True, ckeys=True):
"""
Tests that the important (i.e. savable) information in this databox
is the same as that of the other_databox.
Parameters
----------
other_databo... | Tests that the important (i.e. savable) information in this databox
is the same as that of the other_databox.
Parameters
----------
other_databox
Databox with which to compare.
headers=True
Make sure all header elements match.
columns=True... |
def is_allowed(func):
"""Check user password, when is correct, then run decorated function.
:returns: decorated function
"""
@wraps(func)
def _is_allowed(user, *args, **kwargs):
password = kwargs.pop('password', None)
if user.check_password(password):
return func(user, ... | Check user password, when is correct, then run decorated function.
:returns: decorated function |
def song(self):
"""
:class:`Song` object of next song to play
"""
song = self._connection.request(
'autoplayGetSong',
{'weightModifierRange': [-9, 9],
'seedArtists': dict([(artist, 'p') for artist in self._artists]),
'tagID': self._radio,... | :class:`Song` object of next song to play |
def _predict(self, features):
"""Predict matches and non-matches.
Parameters
----------
features : numpy.ndarray
The data to predict the class of.
Returns
-------
numpy.ndarray
The predicted classes.
"""
from sklearn.exce... | Predict matches and non-matches.
Parameters
----------
features : numpy.ndarray
The data to predict the class of.
Returns
-------
numpy.ndarray
The predicted classes. |
def to_categorical(y, nb_classes, num_classes=None):
"""
Converts a class vector (integers) to binary class matrix.
This is adapted from the Keras function with the same name.
:param y: class vector to be converted into a matrix
(integers from 0 to nb_classes).
:param nb_classes: nb_classes: total... | Converts a class vector (integers) to binary class matrix.
This is adapted from the Keras function with the same name.
:param y: class vector to be converted into a matrix
(integers from 0 to nb_classes).
:param nb_classes: nb_classes: total number of classes.
:param num_classses: depricated version... |
def register_pubkey(self):
"""
XXX Check that the pubkey received is in the group.
"""
p = pkcs_os2ip(self.dh_p)
g = pkcs_os2ip(self.dh_g)
pn = dh.DHParameterNumbers(p, g)
y = pkcs_os2ip(self.dh_Ys)
public_numbers = dh.DHPublicNumbers(y, pn)
s = ... | XXX Check that the pubkey received is in the group. |
def url_read_text(url, verbose=True):
r"""
Directly reads text data from url
"""
data = url_read(url, verbose)
text = data.decode('utf8')
return text | r"""
Directly reads text data from url |
def manage_service_check_result_brok(self, b): # pylint: disable=too-many-branches
"""A service check result brok has just arrived ..."""
host_name = b.data.get('host_name', None)
service_description = b.data.get('service_description', None)
if not host_name or not service_description:
... | A service check result brok has just arrived ... |
def _get_qe(self, key, obj):
"""Instantiate a query engine, or retrieve a cached one.
"""
if key in self._cached:
return self._cached[key]
qe = create_query_engine(obj, self._class)
self._cached[key] = qe
return qe | Instantiate a query engine, or retrieve a cached one. |
def get_assessment_part_ids_by_banks(self, bank_ids):
"""Gets the list of ``AssessmentPart Ids`` corresponding to a list of ``Banks``.
arg: bank_ids (osid.id.IdList): list of bank ``Ids``
return: (osid.id.IdList) - list of assessment part ``Ids``
raise: NullArgument - ``bank_ids`` i... | Gets the list of ``AssessmentPart Ids`` corresponding to a list of ``Banks``.
arg: bank_ids (osid.id.IdList): list of bank ``Ids``
return: (osid.id.IdList) - list of assessment part ``Ids``
raise: NullArgument - ``bank_ids`` is ``null``
raise: OperationFailed - unable to complete r... |
def public_key_sec(self):
"""Return the public key as sec, or None in case of failure."""
if self.is_coinbase():
return None
opcodes = ScriptTools.opcode_list(self.script)
if len(opcodes) == 2 and opcodes[0].startswith("[30"):
# the second opcode is probably the p... | Return the public key as sec, or None in case of failure. |
def read(self, filename=None):
"""Read and parse index file *filename*."""
self._init_filename(filename)
data = odict()
with open(self.real_filename) as ndx:
current_section = None
for line in ndx:
line = line.strip()
if len(line) ... | Read and parse index file *filename*. |
def draw(self):
"""Do not call directly."""
if self.hidden:
return False
if self.background_color is not None:
render.fillrect(self.surface, self.background_color,
rect=pygame.Rect((0, 0), self.frame.size))
for child in self.children... | Do not call directly. |
def set(self, column, value, useMethod=True, **context):
"""
Sets the value for this record at the inputted column
name. If the columnName provided doesn't exist within
the schema, then the ColumnNotFound error will be
raised.
:param columnName | <str>
... | Sets the value for this record at the inputted column
name. If the columnName provided doesn't exist within
the schema, then the ColumnNotFound error will be
raised.
:param columnName | <str>
value | <variant>
:return <bool> changed |
def dump_xearth_markers(markers, name='identifier'):
"""Generate an Xearth compatible marker file.
``dump_xearth_markers()`` writes a simple Xearth_ marker file from
a dictionary of :class:`trigpoints.Trigpoint` objects.
It expects a dictionary in one of the following formats. For support of
:clas... | Generate an Xearth compatible marker file.
``dump_xearth_markers()`` writes a simple Xearth_ marker file from
a dictionary of :class:`trigpoints.Trigpoint` objects.
It expects a dictionary in one of the following formats. For support of
:class:`Trigpoint` that is::
{500936: Trigpoint(52.06603... |
def git_checkout(repo_dir, ref, branch=None):
"""Do a git checkout of `ref` in `repo_dir`.
If branch is specified it should be the name of the new branch.
"""
command = ['git', 'checkout', '--force']
if branch:
command.extend(['-B', '{}'.format(branch)])
command.append(ref)
return e... | Do a git checkout of `ref` in `repo_dir`.
If branch is specified it should be the name of the new branch. |
def setup(self, settings):
'''
Setup redis and tldextract
'''
self.extract = tldextract.TLDExtract()
self.redis_conn = redis.Redis(host=settings['REDIS_HOST'],
port=settings['REDIS_PORT'],
db=settings.get... | Setup redis and tldextract |
def result(self):
    """Sort the accumulated results in place and return them.

    Bug fix: the original called ``list.sort(cmp=...)``, a Python 2-only
    keyword removed in Python 3 (it raises ``TypeError`` there).  The
    comparator is now adapted via :func:`functools.cmp_to_key`, keeping
    the Python 2 semantics where the comparator operates on the
    key-extracted values.

    :return: the (now sorted) internal result list
    """
    from functools import cmp_to_key

    if self.__cmp is None:
        # No comparator configured: plain key-based sort.
        self.__result.sort(key=self.__key, reverse=self.__reverse)
    else:
        cmp_key = cmp_to_key(self.__cmp)
        if self.__key is None:
            sort_key = cmp_key
        else:
            # Python 2 applied cmp to the key-extracted values; mirror that.
            key_func = self.__key
            sort_key = lambda item: cmp_key(key_func(item))
        self.__result.sort(key=sort_key, reverse=self.__reverse)
    return self.__result
def make_opfields(cls):
    """Build the opfields mapping required by virtualchain.

    :return: dict mapping each opcode (looked up in ``NAME_OPCODES``)
        to its serialized field list from ``SERIALIZE_FIELDS``
    """
    return {
        NAME_OPCODES[opname]: fields
        for opname, fields in SERIALIZE_FIELDS.items()
    }
def allowed_values(self):
"""A tuple containing the allowed values for this Slot.
The Python equivalent of the CLIPS slot-allowed-values function.
"""
data = clips.data.DataObject(self._env)
lib.EnvSlotAllowedValues(
self._env, self._cls, self._name, data.byref)
... | A tuple containing the allowed values for this Slot.
The Python equivalent of the CLIPS slot-allowed-values function. |
def get_next(self, label):
    """Skip forward until the current section carries *label*, then
    read and return that section.
    """
    while True:
        if self._get_current_label() == label:
            return self._read_section()
        self._skip_section()
def to_array(self):
"""
Serializes this Chat to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
"""
array = super(Chat, self).to_array()
array['id'] = int(self.id) # type int
array['type'] = u(self.type) # py2: type unicode... | Serializes this Chat to a dictionary.
:return: dictionary representation of this object.
:rtype: dict |
def create_hosted_zone(self, name, caller_reference=None, comment=None):
"""
Creates and returns a new hosted zone. Once a hosted zone is created,
its details can't be changed.
:param str name: The name of the hosted zone to create.
:keyword str caller_reference: A unique string... | Creates and returns a new hosted zone. Once a hosted zone is created,
its details can't be changed.
:param str name: The name of the hosted zone to create.
:keyword str caller_reference: A unique string that identifies the
request and that allows failed create_hosted_zone requests t... |
def i18n_install(lc=None):
"""
Install internationalization support for the clients using the specified locale.
If there is no support for the locale, the default locale will be used.
As last resort, a null translator will be installed.
:param lc: locale to install. If None, the system default local... | Install internationalization support for the clients using the specified locale.
If there is no support for the locale, the default locale will be used.
As last resort, a null translator will be installed.
:param lc: locale to install. If None, the system default locale will be used. |
def from_long(self, number):
    """Populate this PCI address from a long/integer value.

    :param number: integer encoding of the PCI address
    :raises TypeError: if *number* is not a ``baseinteger``
    """
    if isinstance(number, baseinteger):
        self._call("fromLong",
                   in_p=[number])
    else:
        raise TypeError("number can only be an instance of type baseinteger")
in number of type int |
def buckets_insert(self, bucket, project_id=None):
"""Issues a request to create a new bucket.
Args:
bucket: the name of the bucket.
project_id: the project to use when inserting the bucket.
Returns:
A parsed bucket information dictionary.
Raises:
Exception if there is an error ... | Issues a request to create a new bucket.
Args:
bucket: the name of the bucket.
project_id: the project to use when inserting the bucket.
Returns:
A parsed bucket information dictionary.
Raises:
Exception if there is an error performing the operation. |
def get_route(self, file_id):
''' a method to retrieve route information for file on telegram api
:param file_id: string with id of file in a message send to bot
:return: dictionary of response details with route details in [json][result]
'''
title = '%s.get_route' % s... | a method to retrieve route information for file on telegram api
:param file_id: string with id of file in a message send to bot
:return: dictionary of response details with route details in [json][result] |
def _equivalent_node_iterator_helper(self, node: BaseEntity, visited: Set[BaseEntity]) -> BaseEntity:
"""Iterate over nodes and their data that are equal to the given node, starting with the original."""
for v in self[node]:
if v in visited:
continue
if self._has... | Iterate over nodes and their data that are equal to the given node, starting with the original. |
def monte_carlo_vol(self, ndraws=10000, rstate=None,
return_overlap=True):
"""Using `ndraws` Monte Carlo draws, estimate the volume of the
*union* of ellipsoids. If `return_overlap=True`, also returns the
estimated fractional overlap with the unit cube."""
if rst... | Using `ndraws` Monte Carlo draws, estimate the volume of the
*union* of ellipsoids. If `return_overlap=True`, also returns the
estimated fractional overlap with the unit cube. |
def read_wave(path):
"""Reads a .wav file.
Takes the path, and returns (PCM audio data, sample rate).
"""
with contextlib.closing(wave.open(path, 'rb')) as wf:
num_channels = wf.getnchannels()
assert num_channels == 1
sample_width = wf.getsampwidth()
assert sample_width ... | Reads a .wav file.
Takes the path, and returns (PCM audio data, sample rate). |
def bytes_available(device):
"""
Determines the number of bytes available for reading from an
AlarmDecoder device
:param device: the AlarmDecoder device
:type device: :py:class:`~alarmdecoder.devices.Device`
:returns: int
"""
bytes_avail = 0
if isinstance(device, alarmdecoder.devi... | Determines the number of bytes available for reading from an
AlarmDecoder device
:param device: the AlarmDecoder device
:type device: :py:class:`~alarmdecoder.devices.Device`
:returns: int |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.