code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def fetch_blob(cls, username, password, multifactor_password=None, client_id=None):
"""Just fetches the blob, could be used to store it locally"""
session = fetcher.login(username, password, multifactor_password, client_id)
blob = fetcher.fetch(session)
fetcher.logout(session)
r... | Just fetches the blob, could be used to store it locally |
def create_gre_tunnel_no_encryption(cls, name, local_endpoint, remote_endpoint,
mtu=0, pmtu_discovery=True, ttl=0,
enabled=True, comment=None):
"""
Create a GRE Tunnel with no encryption. See `create_gre_tunnel_mode` for
constructor description... | Create a GRE Tunnel with no encryption. See `create_gre_tunnel_mode` for
constructor descriptions. |
def _prune_penalty_box(self):
"""Restores clients that have reconnected.
This function should be called first for every public method.
"""
added = False
for client in self.penalty_box.get():
log.info("Client %r is back up.", client)
self.active_clients.ap... | Restores clients that have reconnected.
This function should be called first for every public method. |
def get_signature_candidate(lines):
"""Return lines that could hold signature
The lines should:
* be among last SIGNATURE_MAX_LINES non-empty lines.
* not include first line
* be shorter than TOO_LONG_SIGNATURE_LINE
* not include more than one line that starts with dashes
"""
# non emp... | Return lines that could hold signature
The lines should:
* be among last SIGNATURE_MAX_LINES non-empty lines.
* not include first line
* be shorter than TOO_LONG_SIGNATURE_LINE
* not include more than one line that starts with dashes |
def check_node_parent(
self, resource_id, new_parent_id, db_session=None, *args, **kwargs
):
"""
Checks if parent destination is valid for node
:param resource_id:
:param new_parent_id:
:param db_session:
:return:
"""
return self.service.check... | Checks if parent destination is valid for node
:param resource_id:
:param new_parent_id:
:param db_session:
:return: |
def get_subclass_tree(cls, ensure_unique=True):
"""Returns all subclasses (direct and recursive) of cls."""
subclasses = []
# cls.__subclasses__() fails on classes inheriting from type
for subcls in type.__subclasses__(cls):
subclasses.append(subcls)
subclasses.extend(get_subclass_tree(s... | Returns all subclasses (direct and recursive) of cls. |
def get_json_response_object(self, datatable):
"""
Returns the JSON-compatible dictionary that will be serialized for an AJAX response.
The value names are in the form "s~" for strings, "i~" for integers, and "a~" for arrays,
if you're unfamiliar with the old C-style jargon used in data... | Returns the JSON-compatible dictionary that will be serialized for an AJAX response.
The value names are in the form "s~" for strings, "i~" for integers, and "a~" for arrays,
if you're unfamiliar with the old C-style jargon used in dataTables.js. "aa~" means
"array of arrays". In some instanc... |
def get_callback_function(setting_name, default=None):
"""
Resolve a callback function based on a setting name.
If the setting value isn't set, default is returned. If the setting value
is already a callable function, that value is used - If the setting value
is a string, an attempt is made to import it. Anythi... | Resolve a callback function based on a setting name.
If the setting value isn't set, default is returned. If the setting value
is already a callable function, that value is used - If the setting value
is a string, an attempt is made to import it. Anything else will result in
a failed import causing ImportError t... |
def add_load(self, lv_load):
"""Adds a LV load to _loads and grid graph if not already existing
Parameters
----------
lv_load :
Description #TODO
"""
if lv_load not in self._loads and isinstance(lv_load,
... | Adds a LV load to _loads and grid graph if not already existing
Parameters
----------
lv_load :
Description #TODO |
def compare(left: Union[str, pathlib.Path, _Entity],
right: Union[str, pathlib.Path, _Entity]) -> Comparison:
"""
Compare two paths.
:param left: The left side or "before" entity.
:param right: The right side or "after" entity.
:return: A comparison details what has changed from the lef... | Compare two paths.
:param left: The left side or "before" entity.
:param right: The right side or "after" entity.
:return: A comparison details what has changed from the left side to the
right side. |
def _split_input_slice(batch_size, work_load_list):
"""Get input slice from the input shape.
Parameters
----------
batch_size : int
The number of samples in a mini-batch.
work_load_list : list of float or int, optional
The list of work load for different devices,
in the same... | Get input slice from the input shape.
Parameters
----------
batch_size : int
The number of samples in a mini-batch.
work_load_list : list of float or int, optional
The list of work load for different devices,
in the same order as `ctx`.
Returns
-------
slices : list... |
def get_column_at_index(self, index):
"""
Returns a table column by it's index
:param int index: the zero-indexed position of the column in the table
"""
if index is None:
return None
url = self.build_url(self._endpoints.get('get_column_index'))
respo... | Returns a table column by it's index
:param int index: the zero-indexed position of the column in the table |
def upload_file(request):
'''Upload a Zip File Containing a single file containing media.'''
if request.method == 'POST':
form = MediaForm(request.POST, request.FILES)
if form.is_valid():
context_dict = {}
try:
context_dict['copied_files'] = update_media_f... | Upload a Zip File Containing a single file containing media. |
def standard_block(self, bytes_):
"""Adds a standard block of bytes. For TAP files, it's just the
Low + Hi byte plus the content (here, the bytes plus the checksum)
"""
self.out(self.LH(len(bytes_) + 1)) # + 1 for CHECKSUM byte
checksum = 0
for i in bytes_:
... | Adds a standard block of bytes. For TAP files, it's just the
Low + Hi byte plus the content (here, the bytes plus the checksum) |
def apply_plugin_settings(self, options):
"""Apply configuration file's plugin settings"""
color_scheme_n = 'color_scheme_name'
color_scheme_o = self.get_color_scheme()
font_n = 'plugin_font'
font_o = self.get_plugin_font()
wrap_n = 'wrap'
wrap_o = self.get... | Apply configuration file's plugin settings |
def _allowAnotherAt(cls, parent):
"""You can only create one of these pages per site."""
site = parent.get_site()
if site is None:
return False
return not cls.peers().descendant_of(site.root_page).exists() | You can only create one of these pages per site. |
def wheel_dist_name(self):
"""Return distribution full name with - replaced with _"""
components = (safer_name(self.distribution.get_name()),
safer_version(self.distribution.get_version()))
if self.build_number:
components += (self.build_number,)
return ... | Return distribution full name with - replaced with _ |
def cPrint(self, level, message, *args, **kw):
"""Print a message to the console.
Prints only if level <= self.consolePrinterVerbosity
Printing with level 0 is equivalent to using a print statement,
and should normally be avoided.
:param level: (int) indicating the urgency of the message with
... | Print a message to the console.
Prints only if level <= self.consolePrinterVerbosity
Printing with level 0 is equivalent to using a print statement,
and should normally be avoided.
:param level: (int) indicating the urgency of the message with
lower values meaning more urgent (messages at l... |
def get_filtered_register_graph(register_uri, g):
"""
Gets a filtered version (label, comment, contained item classes & subregisters only) of the each register for the
Register of Registers
:param register_uri: the public URI of the register
:type register_uri: string
:param g: the rdf graph to... | Gets a filtered version (label, comment, contained item classes & subregisters only) of the each register for the
Register of Registers
:param register_uri: the public URI of the register
:type register_uri: string
:param g: the rdf graph to append registers to
:type g: Graph
:return: True if o... |
def _combine(self, applied, shortcut=False):
"""Recombine the applied objects like the original."""
applied_example, applied = peek_at(applied)
coord, dim, positions = self._infer_concat_args(applied_example)
if shortcut:
combined = self._concat_shortcut(applied, dim, positio... | Recombine the applied objects like the original. |
def state_province_region(self, value=None):
"""Corresponds to IDD Field `state_province_region`
Args:
value (str): value for IDD Field `state_province_region`
if `value` is None it will not be checked against the
specification and is assumed to be a missing ... | Corresponds to IDD Field `state_province_region`
Args:
value (str): value for IDD Field `state_province_region`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value... |
def reboot_node(node_id, profile, **libcloud_kwargs):
'''
Reboot a node in the cloud
:param node_id: Unique ID of the node to reboot
:type node_id: ``str``
:param profile: The profile key
:type profile: ``str``
:param libcloud_kwargs: Extra arguments for the driver's reboot_node method
... | Reboot a node in the cloud
:param node_id: Unique ID of the node to reboot
:type node_id: ``str``
:param profile: The profile key
:type profile: ``str``
:param libcloud_kwargs: Extra arguments for the driver's reboot_node method
:type libcloud_kwargs: ``dict``
CLI Example:
.. cod... |
def _request(self, method, uri, headers={}, body='', stream=False):
"""
Given a Method, URL, Headers, and Body, perform and HTTP
request, and return a 3-tuple containing the response status,
response headers (as httplib.HTTPMessage), and response body.
"""
response = None... | Given a Method, URL, Headers, and Body, perform and HTTP
request, and return a 3-tuple containing the response status,
response headers (as httplib.HTTPMessage), and response body. |
def create_endpoint_folder(self, endpoint_id, folder):
'''create an endpoint folder, catching the error if it exists.
Parameters
==========
endpoint_id: the endpoint id parameters
folder: the relative path of the folder to create
'''
try:
res = self.transfer_client.oper... | create an endpoint folder, catching the error if it exists.
Parameters
==========
endpoint_id: the endpoint id parameters
folder: the relative path of the folder to create |
def p_union_patch(self, p):
"""union_patch : PATCH uniont ID NL INDENT field_list examples DEDENT"""
p[0] = AstUnionPatch(
path=self.path,
lineno=p[2][1],
lexpos=p[2][2],
name=p[3],
fields=p[6],
examples=p[7],
closed=p[2... | union_patch : PATCH uniont ID NL INDENT field_list examples DEDENT |
def refresh(self, data):
"""
refresh the module(s)
"""
modules = data.get("module")
# for i3status modules we have to refresh the whole i3status output.
update_i3status = False
for module_name in self.find_modules(modules):
module = self.py3_wrapper.ou... | refresh the module(s) |
def getCachedOrUpdatedValue(self, key):
""" Gets the device's value with the given key.
If the key is not found in the cache, the value is queried from the host.
"""
try:
return self._VALUES[key]
except KeyError:
return self.getValue(key) | Gets the device's value with the given key.
If the key is not found in the cache, the value is queried from the host. |
def add_attribute(self, tag, name, value):
""" add an attribute (nam, value pair) to the named tag """
self.add_tag(tag)
d = self._tags[tag]
d[name] = value | add an attribute (nam, value pair) to the named tag |
def plot_world(*args, **kwargs):
"""
Generate a plot from received instance of World and show it.
See also plot_world_with_elegans and plot_world_with_matplotlib.
Parameters
----------
world : World or str
World or a HDF5 filename to render.
interactive : bool, default True
... | Generate a plot from received instance of World and show it.
See also plot_world_with_elegans and plot_world_with_matplotlib.
Parameters
----------
world : World or str
World or a HDF5 filename to render.
interactive : bool, default True
Choose a visualizer. If False, show the plot ... |
def _get_tick_frac_labels(self):
"""Get the major ticks, minor ticks, and major labels"""
minor_num = 4 # number of minor ticks per major division
if (self.axis.scale_type == 'linear'):
domain = self.axis.domain
if domain[1] < domain[0]:
flip = True
... | Get the major ticks, minor ticks, and major labels |
def make_ns(self, ns):
'''
Returns the `lazily` created template namespace.
'''
if self.namespace:
val = {}
val.update(self.namespace)
val.update(ns)
return val
else:
return ns | Returns the `lazily` created template namespace. |
def user(self):
"""Creates a User object when requested."""
try:
return self._user
except AttributeError:
self._user = MatrixUser(self.mxid, self.Api(identity=self.mxid))
return self._user | Creates a User object when requested. |
def smoother_step(F, filt, next_pred, next_smth):
"""Smoothing step of Kalman filter/smoother.
Parameters
----------
F: (dx, dx) numpy array
Mean of X_t | X_{t-1} is F * X_{t-1}
filt: MeanAndCov object
filtering distribution at time t
next_pred: MeanAndCov object
predi... | Smoothing step of Kalman filter/smoother.
Parameters
----------
F: (dx, dx) numpy array
Mean of X_t | X_{t-1} is F * X_{t-1}
filt: MeanAndCov object
filtering distribution at time t
next_pred: MeanAndCov object
predictive distribution at time t+1
next_smth: MeanAndCov ... |
def execute(self):
"""
Invoke the redispy pipeline.execute() method and take all the values
returned in sequential order of commands and map them to the
Future objects we returned when each command was queued inside
the pipeline.
Also invoke all the callback functions que... | Invoke the redispy pipeline.execute() method and take all the values
returned in sequential order of commands and map them to the
Future objects we returned when each command was queued inside
the pipeline.
Also invoke all the callback functions queued up.
:param raise_on_error: ... |
def api_request(
self,
method,
path,
query_params=None,
data=None,
content_type=None,
headers=None,
api_base_url=None,
api_version=None,
expect_json=True,
_target_object=None,
):
"""Make a request over the HTTP transport... | Make a request over the HTTP transport to the API.
You shouldn't need to use this method, but if you plan to
interact with the API using these primitives, this is the
correct one to use.
:type method: str
:param method: The HTTP method name (ie, ``GET``, ``POST``, etc).
... |
def create_constants(self, rdbms):
"""
Factory for creating a Constants objects (i.e. objects for creating constants based on column widths, and auto
increment columns and labels).
:param str rdbms: The target RDBMS (i.e. mysql, mssql or pgsql).
:rtype: pystratum.Constants.Cons... | Factory for creating a Constants objects (i.e. objects for creating constants based on column widths, and auto
increment columns and labels).
:param str rdbms: The target RDBMS (i.e. mysql, mssql or pgsql).
:rtype: pystratum.Constants.Constants |
def reload_cache_config(self, call_params):
"""REST Reload Plivo Cache Config helper
"""
path = '/' + self.api_version + '/ReloadCacheConfig/'
method = 'POST'
return self.request(path, method, call_params) | REST Reload Plivo Cache Config helper |
def get_window_settings(self):
"""Return current window settings
Symetric to the 'set_window_settings' setter"""
window_size = (self.window_size.width(), self.window_size.height())
is_fullscreen = self.isFullScreen()
if is_fullscreen:
is_maximized = self.maximiz... | Return current window settings
Symetric to the 'set_window_settings' setter |
def recv(sock, size):
"""Receives exactly `size` bytes. This function blocks the thread."""
data = sock.recv(size, socket.MSG_WAITALL)
if len(data) < size:
raise socket.error(ECONNRESET, 'Connection closed')
return data | Receives exactly `size` bytes. This function blocks the thread. |
def fork(self, server_address: str = None, *, namespace: str = None) -> "State":
r"""
"Forks" this State object.
Takes the same args as the :py:class:`State` constructor,
except that they automatically default to the values provided during the creation of this State object.
If ... | r"""
"Forks" this State object.
Takes the same args as the :py:class:`State` constructor,
except that they automatically default to the values provided during the creation of this State object.
If no args are provided to this function,
then it shall create a new :py:class:`Stat... |
def timeout(seconds=None, use_signals=True, timeout_exception=TimeoutError, exception_message=None):
"""Add a timeout parameter to a function and return it.
:param seconds: optional time limit in seconds or fractions of a second. If None is passed, no timeout is applied.
This adds some flexibility to t... | Add a timeout parameter to a function and return it.
:param seconds: optional time limit in seconds or fractions of a second. If None is passed, no timeout is applied.
This adds some flexibility to the usage: you can disable timing out depending on the settings.
:type seconds: float
:param use_sign... |
def process_rst_and_summaries(content_generators):
"""
Ensure mathjax script is applied to RST and summaries are
corrected if specified in user settings.
Handles content attached to ArticleGenerator and PageGenerator objects,
since the plugin doesn't know how to handle other Generator types.
F... | Ensure mathjax script is applied to RST and summaries are
corrected if specified in user settings.
Handles content attached to ArticleGenerator and PageGenerator objects,
since the plugin doesn't know how to handle other Generator types.
For reStructuredText content, examine both articles and pages.
... |
def parse_epsv_response(s):
"""
Parsing `EPSV` (`message (|||port|)`) response.
:param s: response line
:type s: :py:class:`str`
:return: (ip, port)
:rtype: (:py:class:`None`, :py:class:`int`)
"""
matches = tuple(re.finditer(r"\((.)\1\1\d+\1\)", s))
... | Parsing `EPSV` (`message (|||port|)`) response.
:param s: response line
:type s: :py:class:`str`
:return: (ip, port)
:rtype: (:py:class:`None`, :py:class:`int`) |
def predict(self, X):
"""Predict the class for X.
The predicted class for each sample in X is returned.
Parameters
----------
X : List of ndarrays, one for each training example.
Each training example's shape is (string1_len,
string2_len, n_features), wh... | Predict the class for X.
The predicted class for each sample in X is returned.
Parameters
----------
X : List of ndarrays, one for each training example.
Each training example's shape is (string1_len,
string2_len, n_features), where string1_len and
s... |
def regions(self):
"""gets the regions value"""
url = "%s/regions" % self.root
params = {"f": "json"}
return self._get(url=url,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port) | gets the regions value |
def output_to_json(sources):
"""Print statistics to the terminal in Json format"""
results = OrderedDict()
for source in sources:
if source.get_is_available():
source.update()
source_name = source.get_source_name()
results[source_name] = source.get_sensors_summary... | Print statistics to the terminal in Json format |
def minimumBelow(requestContext, seriesList, n):
"""
Takes one metric or a wildcard seriesList followed by a constant n.
Draws only the metrics with a minimum value below n.
Example::
&target=minimumBelow(system.interface.eth*.packetsSent,1000)
This would only display interfaces which sen... | Takes one metric or a wildcard seriesList followed by a constant n.
Draws only the metrics with a minimum value below n.
Example::
&target=minimumBelow(system.interface.eth*.packetsSent,1000)
This would only display interfaces which sent at one point less than
1000 packets/min. |
def fromlineno(self):
"""The first line that this node appears on in the source code.
:type: int or None
"""
lineno = super(Arguments, self).fromlineno
return max(lineno, self.parent.fromlineno or 0) | The first line that this node appears on in the source code.
:type: int or None |
def permission_required(perm, *lookup_variables, **kwargs):
"""
Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary.
"""
login_url = kwargs.pop('login_url', settings.LOGIN_URL)
redirect_field_name = kwargs.pop('redirect_... | Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary. |
def update(self, item, dry_run=None):
"""Updates item info in file."""
logger.debug('Updating item. Item: {item} Table: {namespace}'.format(
item=item,
namespace=self.namespace
))
if not dry_run:
self.table.put_item(Item=item)
return item | Updates item info in file. |
def create_session(self, session_id, register=True, session_factory=None):
""" Creates new session object and returns it.
@param session_id: Session id. If not provided, will generate a
new session id.
@param register: Should be the session registered in a storage.
... | Creates new session object and returns it.
@param session_id: Session id. If not provided, will generate a
new session id.
@param register: Should be the session registered in a storage.
Websockets don't need it.
@param session_factory: Use ... |
def is_data_diverging(data_container):
"""
We want to use this to check whether the data are diverging or not.
This is a simple check, can be made much more sophisticated.
:param data_container: A generic container of data points.
:type data_container: `iterable`
"""
assert infer_data_type... | We want to use this to check whether the data are diverging or not.
This is a simple check, can be made much more sophisticated.
:param data_container: A generic container of data points.
:type data_container: `iterable` |
def _fix_up_properties(cls):
"""Fix up the properties by calling their _fix_up() method.
Note: This is called by MetaModel, but may also be called manually
after dynamically updating a model class.
"""
# Verify that _get_kind() returns an 8-bit string.
kind = cls._get_kind()
if not isinstan... | Fix up the properties by calling their _fix_up() method.
Note: This is called by MetaModel, but may also be called manually
after dynamically updating a model class. |
def analyze(data, normalize=None, reduce=None, ndims=None, align=None, internal=False):
"""
Wrapper function for normalize -> reduce -> align transformations.
Parameters
----------
data : numpy array, pandas df, or list of arrays/dfs
The data to analyze
normalize : str or False or None... | Wrapper function for normalize -> reduce -> align transformations.
Parameters
----------
data : numpy array, pandas df, or list of arrays/dfs
The data to analyze
normalize : str or False or None
If set to 'across', the columns of the input data will be z-scored
across lists (de... |
def lset(self, key, index, value):
"""Emulate lset."""
redis_list = self._get_list(key, 'LSET')
if redis_list is None:
raise ResponseError("no such key")
try:
redis_list[index] = self._encode(value)
except IndexError:
raise ResponseError("index... | Emulate lset. |
def create(self, **kwargs):
"""
Create a resource on the server
:params kwargs: Attributes (field names and values) of the new resource
"""
resource = self.resource_class(self.client)
resource.update_from_dict(kwargs)
resource.save(force_create=True)
retu... | Create a resource on the server
:params kwargs: Attributes (field names and values) of the new resource |
def get_stored_content_length(headers):
"""Return the content length (in bytes) of the object as stored in GCS.
x-goog-stored-content-length should always be present except when called via
the local dev_appserver. Therefore if it is not present we default to the
standard content-length header.
Args:
hea... | Return the content length (in bytes) of the object as stored in GCS.
x-goog-stored-content-length should always be present except when called via
the local dev_appserver. Therefore if it is not present we default to the
standard content-length header.
Args:
headers: a dict of headers from the http respons... |
def make_key(table_name, objid):
"""Create an object key for storage."""
key = datastore.Key()
path = key.path_element.add()
path.kind = table_name
path.name = str(objid)
return key | Create an object key for storage. |
def main():
""" Main function """
ctx = {}
def pretty_json(data):
return json.dumps(data, indent=2, sort_keys=True)
client = server.create_app().test_client()
host = 'example.com:9984'
# HTTP Index
res = client.get('/', environ_overrides={'HTTP_HOST': host})
res_data = json.... | Main function |
def sort(self, values):
"""Sort the values in-place based on the connectors in the network."""
for level in self:
for wire1, wire2 in level:
if values[wire1] > values[wire2]:
values[wire1], values[wire2] = values[wire2], values[wire1] | Sort the values in-place based on the connectors in the network. |
def plistfilename(self):
'''
This is a lazily detected absolute filename of the corresponding
property list file (*.plist). None if it doesn't exist.
'''
if self._plist_fname is None:
self._plist_fname = discover_filename(self.label)
return self._plist_fname | This is a lazily detected absolute filename of the corresponding
property list file (*.plist). None if it doesn't exist. |
def _error_if_word_invalid(word,
valid_words_dictionary,
technical_words_dictionary,
line_offset,
col_offset):
"""Return SpellcheckError if this non-technical word is invalid."""
word_lower = word.lower()... | Return SpellcheckError if this non-technical word is invalid. |
def _assert_command_dict(self, struct, name, path=None, extra_info=None):
"""Checks whether struct is a command dict (e.g. it's a dict and has 1 key-value pair."""
self._assert_dict(struct, name, path, extra_info)
if len(struct) != 1:
err = [self._format_error_path(path + [name])]
... | Checks whether struct is a command dict (e.g. it's a dict and has 1 key-value pair. |
def create_api(name, description, cloneFrom=None,
region=None, key=None, keyid=None, profile=None):
'''
Create a new REST API Service with the given name
Returns {created: True} if the rest api was created and returns
{created: False} if the rest api was not created.
CLI Example:
... | Create a new REST API Service with the given name
Returns {created: True} if the rest api was created and returns
{created: False} if the rest api was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_apigateway.create_api myapi_name api_description |
def cur_time(typ='date', tz=DEFAULT_TZ) -> (datetime.date, str):
"""
Current time
Args:
typ: one of ['date', 'time', 'time_path', 'raw', '']
tz: timezone
Returns:
relevant current time or date
Examples:
>>> cur_dt = pd.Timestamp('now')
>>> cur_time(typ='dat... | Current time
Args:
typ: one of ['date', 'time', 'time_path', 'raw', '']
tz: timezone
Returns:
relevant current time or date
Examples:
>>> cur_dt = pd.Timestamp('now')
>>> cur_time(typ='date') == cur_dt.strftime('%Y-%m-%d')
True
>>> cur_time(typ='tim... |
def do_refresh(self,args):
"""Refresh the view of the log group"""
# prints all the groups: pprint(AwsConnectionFactory.getLogClient().describe_log_groups())
response = AwsConnectionFactory.getLogClient().describe_log_groups(logGroupNamePrefix=self.stackResource.physical_resource_id)
if... | Refresh the view of the log group |
def patch_sys(self, inherit_path):
"""Patch sys with all site scrubbed."""
def patch_dict(old_value, new_value):
old_value.clear()
old_value.update(new_value)
def patch_all(path, path_importer_cache, modules):
sys.path[:] = path
patch_dict(sys.path_importer_cache, path_importer_cach... | Patch sys with all site scrubbed. |
def _attach_record_as_json(mfg_event, record):
"""Attach a copy of the record as JSON so we have an un-mangled copy."""
attachment = mfg_event.attachment.add()
attachment.name = TEST_RECORD_ATTACHMENT_NAME
test_record_dict = htf_data.convert_to_base_types(record)
attachment.value_binary = _convert_object_to_j... | Attach a copy of the record as JSON so we have an un-mangled copy. |
def filesfile_string(self):
"""String with the list of files and prefixes needed to execute ABINIT."""
lines = []
app = lines.append
#optic.in ! Name of input file
#optic.out ! Unused
#optic ! Root name for all files that will be produced
app(self.i... | String with the list of files and prefixes needed to execute ABINIT. |
def update_stats(stats, start_time, data):
'''
Calculate the master stats and return the updated stat info
'''
end_time = time.time()
cmd = data['cmd']
# the jid is used as the create time
try:
jid = data['jid']
except KeyError:
try:
jid = data['data']['__pub_... | Calculate the master stats and return the updated stat info |
def slaveraise(self, type, error, traceback):
""" slave only """
message = 'E' * 1 + pickle.dumps((type,
''.join(tb.format_exception(type, error, traceback))))
if self.pipe is not None:
self.pipe.put(message) | slave only |
def perform_iteration(self):
"""Get any changes to the log files and push updates to Redis."""
stats = self.get_all_stats()
self.redis_client.publish(
self.redis_key,
jsonify_asdict(stats),
) | Get any changes to the log files and push updates to Redis. |
def laplacian(script, iterations=1, boundary=True, cotangent_weight=True,
selected=False):
""" Laplacian smooth of the mesh: for each vertex it calculates the average
position with nearest vertex
Args:
script: the FilterScript object or script filename to write
the fil... | Laplacian smooth of the mesh: for each vertex it calculates the average
position with nearest vertex
Args:
script: the FilterScript object or script filename to write
the filter to.
iterations (int): The number of times that the whole algorithm (normal
smoothing + ve... |
def one_line(self):
"""Return True|False if the AMP shoukd be displayed in oneline (one_lineline=true|false)."""
ret = self.get('one_line')
if ret is None:
return False
else:
return ret.lower().startswith('true') | Return True|False if the AMP shoukd be displayed in oneline (one_lineline=true|false). |
def speed(self):
'''Return the current transfer speed.
Returns:
int: The speed in bytes per second.
'''
if self._stalled:
return 0
time_sum = 0
data_len_sum = 0
for time_diff, data_len in self._samples:
time_sum += time_diff
... | Return the current transfer speed.
Returns:
int: The speed in bytes per second. |
def add_to_manifest(self, manifest):
"""
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry... | Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app. |
def chunks(iterable, size=1):
"""Splits iterator in chunks."""
iterator = iter(iterable)
for element in iterator:
yield chain([element], islice(iterator, size - 1)) | Splits iterator in chunks. |
def nvmlDeviceGetPcieReplayCounter(handle):
r"""
/**
* Retrieve the PCIe replay counter.
*
* For Kepler &tm; or newer fully supported devices.
*
* @param device The identifier of the target device
* @param value Reference... | r"""
/**
* Retrieve the PCIe replay counter.
*
* For Kepler &tm; or newer fully supported devices.
*
* @param device The identifier of the target device
* @param value Reference in which to return the counter's value
*
... |
def parse_san(self, san: str) -> Move:
"""
Uses the current position as the context to parse a move in standard
algebraic notation and returns the corresponding move object.
The returned move is guaranteed to be either legal or a null move.
:raises: :exc:`ValueError` if the SAN... | Uses the current position as the context to parse a move in standard
algebraic notation and returns the corresponding move object.
The returned move is guaranteed to be either legal or a null move.
:raises: :exc:`ValueError` if the SAN is invalid or ambiguous. |
def get_separator_words(toks1):
"""
Finds the words that separate a list of tokens from a background corpus
Basically this generates a list of informative/interesting words in a set
toks1 is a list of words
Returns a list of separator words
"""
tab_toks1 = nltk.FreqDist(word.lower() for word... | Finds the words that separate a list of tokens from a background corpus
Basically this generates a list of informative/interesting words in a set
toks1 is a list of words
Returns a list of separator words |
def _soap_client_call(method_name, *args):
"""Wrapper to call SoapClient method"""
# a new client instance is built for threading issues
soap_client = _build_soap_client()
soap_args = _convert_soap_method_args(*args)
# if pysimplesoap version requires it, apply a workaround for
# https://github.... | Wrapper to call SoapClient method |
def _configure_registry(self, include_process_stats: bool = False):
"""Configure the MetricRegistry."""
if include_process_stats:
self.registry.register_additional_collector(
ProcessCollector(registry=None)) | Configure the MetricRegistry. |
def configure(config={}, datastore=None, nested=False):
"""
Useful for when you need to control Switchboard's setup
"""
if nested:
config = nested_config(config)
# Re-read settings to make sure we have everything.
# XXX It would be really nice if we didn't need to do this.
Settings.i... | Useful for when you need to control Switchboard's setup |
def loads(s, encoding=None, cls=None, object_hook=None, **kw):
"""
Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``en... | Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
must be specified. Encodings that are not ASCII based (s... |
def emit(
self, tup, stream=None, anchors=None, direct_task=None, need_task_ids=False
):
"""Emit a new Tuple to a stream.
:param tup: the Tuple payload to send to Storm, should contain only
JSON-serializable data.
:type tup: :class:`list` or :class:`pystorm.compo... | Emit a new Tuple to a stream.
:param tup: the Tuple payload to send to Storm, should contain only
JSON-serializable data.
:type tup: :class:`list` or :class:`pystorm.component.Tuple`
:param stream: the ID of the stream to emit this Tuple to. Specify
``... |
def _restore_file_lmt(self):
# type: (Descriptor) -> None
"""Restore file lmt for file
:param Descriptor self: this
"""
if not self._restore_file_properties.lmt or self._ase.lmt is None:
return
# timestamp() func is not available in py27
ts = time.mkti... | Restore file lmt for file
:param Descriptor self: this |
def compare_mim_panels(self, existing_panel, new_panel):
"""Check if the latest version of OMIM differs from the most recent in database
        Return all genes that were not in the previous version.
Args:
existing_panel(dict)
new_panel(dict)
Returns:
n... | Check if the latest version of OMIM differs from the most recent in database
Return all genes that were not in the previous version.
Args:
existing_panel(dict)
new_panel(dict)
Returns:
new_genes(set(str)) |
def cd(path):
    '''Creates the path if it doesn't exist, changes into it, and
    restores the previous working directory when the block exits.'''
    previous_dir = os.getcwd()
    # Best-effort creation: ignore OSError (e.g. path already exists);
    # a genuinely unusable path will surface in the chdir below.
    try:
        os.makedirs(path)
    except OSError:
        pass
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous_dir)
def service_define(self, service, ty):
"""
Add a service variable of type ``ty`` to this model
:param str service: variable name
:param type ty: variable type
:return: None
"""
assert service not in self._data
assert service not in self._algebs + self._s... | Add a service variable of type ``ty`` to this model
:param str service: variable name
:param type ty: variable type
:return: None |
def get_my_ip():
    """Return this computer's IP address as a string.

    Runs the shell command held in the module-level ``GET_IP_CMD`` and
    returns its stdout with surrounding whitespace removed.
    """
    output = subprocess.check_output(GET_IP_CMD, shell=True).decode('utf-8')
    # .strip() already removes the trailing newline; the previous [:-1]
    # slice would chop a real character if the output had no newline.
    return output.strip()
def cast_item(cls, item):
"""Cast list item to the appropriate tag type."""
if not isinstance(item, cls.subtype):
incompatible = isinstance(item, Base) and not any(
issubclass(cls.subtype, tag_type) and isinstance(item, tag_type)
for tag_type in cls.all_t... | Cast list item to the appropriate tag type. |
def capture_working_directory(self):
"""
Returns a working directory where to temporary store packet capture files.
:returns: path to the directory
"""
workdir = os.path.join(self._path, "tmp", "captures")
if not self._deleted:
try:
os.makedi... | Returns a working directory where to temporary store packet capture files.
:returns: path to the directory |
def _histogram_fixed_binsize(a, start, width, n):
"""histogram_even(a, start, width, n) -> histogram
Return an histogram where the first bin counts the number of lower
outliers and the last bin the number of upper outliers. Works only with
fixed width bins.
:Stochastics:
a : array
Ar... | histogram_even(a, start, width, n) -> histogram
Return an histogram where the first bin counts the number of lower
outliers and the last bin the number of upper outliers. Works only with
fixed width bins.
:Stochastics:
a : array
Array of samples.
start : float
Left-most bin... |
def stack(self, k=5, stratify=False, shuffle=True, seed=100, full_test=True, add_diff=False):
"""Stacks sequence of models.
Parameters
----------
k : int, default 5
Number of folds.
stratify : bool, default False
shuffle : bool, default True
seed : i... | Stacks sequence of models.
Parameters
----------
k : int, default 5
Number of folds.
stratify : bool, default False
shuffle : bool, default True
seed : int, default 100
full_test : bool, default True
If True then evaluate test dataset on ... |
def filter_data(data, filter_dict):
""" filter a data dictionary for values only matching the filter """
for key, match_string in filter_dict.items():
if key not in data:
logger.warning("{0} doesn't match a top level key".format(key))
continue
values = data[key]
m... | filter a data dictionary for values only matching the filter |
def threadpooled(
func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]],
*,
loop_getter: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop],
loop_getter_need_context: bool = False,
) -> typing.Callable[..., "asyncio.Task[typing.Any]"]:... | Overload: function callable, loop getter available. |
def get_events(self, service_location_id, appliance_id, start, end,
max_number=None):
"""
Request events for a given appliance
Parameters
----------
service_location_id : int
appliance_id : int
start : int | dt.datetime | pd.Timestamp
e... | Request events for a given appliance
Parameters
----------
service_location_id : int
appliance_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pan... |
def fmt(self):
"""Make printable representation out of this instance.
"""
tmpl = string.Template(self.template)
kw = {}
for key, val in self.kw.items():
if key == 'phrase':
kw[key] = val
else:
kw[key] = val.fmt()
ret... | Make printable representation out of this instance. |
def find_by_id(self, section, params={}, **options):
"""Returns the complete record for a single section.
Parameters
----------
section : {Id} The section to get.
[params] : {Object} Parameters for the request
"""
path = "/sections/%s" % (section)
return... | Returns the complete record for a single section.
Parameters
----------
section : {Id} The section to get.
[params] : {Object} Parameters for the request |
def get_header(self, hdrclass, returnval=None):
'''
Return the first header object that is of
class hdrclass, or None if the header class isn't
found.
'''
if isinstance(hdrclass, str):
return self.get_header_by_name(hdrclass)
for hdr in self._headers:... | Return the first header object that is of
class hdrclass, or None if the header class isn't
found. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.