code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def get_supercell_matrix(self, supercell, struct):
"""
Returns the matrix for transforming struct to supercell. This
can be used for very distorted 'supercells' where the primitive cell
is impossible to find
"""
if self._primitive_cell:
raise ValueError("get_s... | Returns the matrix for transforming struct to supercell. This
can be used for very distorted 'supercells' where the primitive cell
is impossible to find |
def GetSOAPEnvUri(self, version):
"""Return the appropriate SOAP envelope uri for a given
human-friendly SOAP version string (e.g. '1.1')."""
attrname = 'NS_SOAP_ENV_%s' % join(split(version, '.'), '_')
value = getattr(self, attrname, None)
if value is not None:
re... | Return the appropriate SOAP envelope uri for a given
human-friendly SOAP version string (e.g. '1.1'). |
def name(value, known_modules=[]):
'''Return a name that can be imported, to serialize/deserialize an object'''
if value is None:
return 'None'
if not isinstance(value, type): # Get the class name first
value = type(value)
tname = value.__name__
if hasattr(builtins, tname):
... | Return a name that can be imported, to serialize/deserialize an object |
def _generate_components(self, X):
"""Generate components of hidden layer given X"""
rs = check_random_state(self.random_state)
if (self._use_mlp_input):
self._compute_biases(rs)
self._compute_weights(X, rs)
if (self._use_rbf_input):
self._compute_ce... | Generate components of hidden layer given X |
def dispatch(self, command, app):
"""
Function runs the active command.
Args
----
command (glim.command.Command): the command object.
app (glim.app.App): the glim app object.
Note:
Exception handling should be done in Command class
itself... | Function runs the active command.
Args
----
command (glim.command.Command): the command object.
app (glim.app.App): the glim app object.
Note:
Exception handling should be done in Command class
itself. If not, an unhandled exception may result
... |
def _dump_spec(spec):
"""Dump bel specification dictionary using YAML
Formats this with an extra indentation for lists to make it easier to
use cold folding on the YAML version of the spec dictionary.
"""
with open("spec.yaml", "w") as f:
yaml.dump(spec, f, Dumper=MyDumper, default_flow_sty... | Dump bel specification dictionary using YAML
Formats this with an extra indentation for lists to make it easier to
use cold folding on the YAML version of the spec dictionary. |
def _z2deriv(self,R,z,phi=0.,t=0.):
"""
NAME:
_z2deriv
PURPOSE:
evaluate the second vertical derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t- time
... | NAME:
_z2deriv
PURPOSE:
evaluate the second vertical derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t- time
OUTPUT:
the second vertical derivative
... |
def mrca_matrix(self):
'''Return a dictionary storing all pairwise MRCAs. ``M[u][v]`` = MRCA of nodes ``u`` and ``v``. Excludes ``M[u][u]`` because MRCA of node and itself is itself
Returns:
``dict``: ``M[u][v]`` = MRCA of nodes ``u`` and ``v``
'''
M = dict()
leaves_... | Return a dictionary storing all pairwise MRCAs. ``M[u][v]`` = MRCA of nodes ``u`` and ``v``. Excludes ``M[u][u]`` because MRCA of node and itself is itself
Returns:
``dict``: ``M[u][v]`` = MRCA of nodes ``u`` and ``v`` |
def p_concat_list(p):
"""
concat_list : expr_list SEMI expr_list
| concat_list SEMI expr_list
"""
if p[1].__class__ == node.expr_list:
p[0] = node.concat_list([p[1], p[3]])
else:
p[0] = p[1]
p[0].append(p[3]) | concat_list : expr_list SEMI expr_list
| concat_list SEMI expr_list |
def param_projection(self, x_param, y_param, metric):
"""
Projects the grid search results onto 2 dimensions.
The wrapped GridSearch object is assumed to be fit already.
The display value is taken as the max over the non-displayed dimensions.
Parameters
----------
... | Projects the grid search results onto 2 dimensions.
The wrapped GridSearch object is assumed to be fit already.
The display value is taken as the max over the non-displayed dimensions.
Parameters
----------
x_param : string
The name of the parameter to be visualized... |
def is_driver(self):
"""Check whether the file is a Windows driver.
This will return true only if there are reliable indicators of the image
being a driver.
"""
# Checking that the ImageBase field of the OptionalHeader is above or
# equal to 0x80000000 (that is,... | Check whether the file is a Windows driver.
This will return true only if there are reliable indicators of the image
being a driver. |
def thousands(x):
"""
>>> thousands(12345)
'12,345'
"""
import locale
try:
locale.setlocale(locale.LC_ALL, "en_US.utf8")
except Exception:
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
finally:
s = '%d' % x
groups = []
while s and s[-1].isdigit():... | >>> thousands(12345)
'12,345' |
def minimal_raw_seqs(self):
''' m.minimal_raw_seqs() -- Return minimal list of seqs that represent consensus '''
seqs = [[], []]
for letter in self.oneletter:
if one2two.has_key(letter):
seqs[0].append(one2two[letter][0])
seqs[1].append(one2two[letter]... | m.minimal_raw_seqs() -- Return minimal list of seqs that represent consensus |
def slice_slice(old_slice, applied_slice, size):
"""Given a slice and the size of the dimension to which it will be applied,
index it with another slice to return a new slice equivalent to applying
the slices sequentially
"""
step = (old_slice.step or 1) * (applied_slice.step or 1)
# For now, u... | Given a slice and the size of the dimension to which it will be applied,
index it with another slice to return a new slice equivalent to applying
the slices sequentially |
def loaded(self, request, *args, **kwargs):
"""Return a list of loaded Packs.
"""
serializer = self.get_serializer(list(Pack.objects.all()),
many=True)
return Response(serializer.data) | Return a list of loaded Packs. |
def _encrypt_asymmetric(self,
encryption_algorithm,
encryption_key,
plain_text,
padding_method,
hashing_algorithm=None):
"""
Encrypt data using asymmetric encryptio... | Encrypt data using asymmetric encryption.
Args:
encryption_algorithm (CryptographicAlgorithm): An enumeration
specifying the asymmetric encryption algorithm to use for
encryption. Required.
encryption_key (bytes): The bytes of the public key to use for
... |
def get_graphql_schema_from_orientdb_schema_data(schema_data, class_to_field_type_overrides=None,
hidden_classes=None):
"""Construct a GraphQL schema from an OrientDB schema.
Args:
schema_data: list of dicts describing the classes in the OrientDB schema.... | Construct a GraphQL schema from an OrientDB schema.
Args:
schema_data: list of dicts describing the classes in the OrientDB schema. The following
format is the way the data is structured in OrientDB 2. See
the README.md file for an example of how to query this data... |
def _data_block(stream):
"""Process data block of ``CTfile``.
:param stream: Queue containing lines of text.
:type stream: :py:class:`collections.deque`
:return: Tuples of data.
:rtype: :class:`~ctfile.tokenizer.DataHeader` or :class:`~ctfile.tokenizer.DataItem`
"""
while len(stream) > ... | Process data block of ``CTfile``.
:param stream: Queue containing lines of text.
:type stream: :py:class:`collections.deque`
:return: Tuples of data.
:rtype: :class:`~ctfile.tokenizer.DataHeader` or :class:`~ctfile.tokenizer.DataItem` |
def CompressStream(in_stream, length=None, compresslevel=2,
chunksize=16777216):
"""Compresses an input stream into a file-like buffer.
This reads from the input stream until either we've stored at least length
compressed bytes, or the input stream has been exhausted.
This supports... | Compresses an input stream into a file-like buffer.
This reads from the input stream until either we've stored at least length
compressed bytes, or the input stream has been exhausted.
This supports streams of unknown size.
Args:
in_stream: The input stream to read from.
length: The t... |
def get_endpoint_map(self):
"""
returns API version and endpoint map
"""
log.debug("getting end points...")
cmd, url = DEVICE_URLS["get_endpoint_map"]
return self._exec(cmd, url) | returns API version and endpoint map |
def joint_plot(x, y, marginalBins=50, gridsize=50, plotlimits=None, logscale_cmap=False, logscale_marginals=False, alpha_hexbin=0.75, alpha_marginals=0.75, cmap="inferno_r", marginalCol=None, figsize=(8, 8), fontsize=8, *args, **kwargs):
"""
Plots some x and y data using hexbins along with a colorbar
and ma... | Plots some x and y data using hexbins along with a colorbar
and marginal distributions (X and Y histograms).
Parameters
----------
x : ndarray
The x data
y : ndarray
The y data
marginalBins : int, optional
The number of bins to use in calculating the marginal
his... |
def get_training_data(batch_size):
""" helper function to get dataloader"""
return gluon.data.DataLoader(
CIFAR10(train=True, transform=transformer),
batch_size=batch_size, shuffle=True, last_batch='discard') | helper function to get dataloader |
def GetRootFileEntry(self):
"""Retrieves the root file entry.
Returns:
TSKPartitionFileEntry: a file entry or None of not available.
"""
path_spec = tsk_partition_path_spec.TSKPartitionPathSpec(
location=self.LOCATION_ROOT, parent=self._path_spec.parent)
return self.GetFileEntryByPath... | Retrieves the root file entry.
Returns:
TSKPartitionFileEntry: a file entry or None of not available. |
async def get_response_metadata(response: str) -> str:
"""
Parse transaction response to fetch metadata.
The important use case for this method is validation of Node's response freshens.
Distributed Ledgers can reply with outdated information for consequence read request after write.
To reduce ... | Parse transaction response to fetch metadata.
The important use case for this method is validation of Node's response freshens.
Distributed Ledgers can reply with outdated information for consequence read request after write.
To reduce pool load libindy sends read requests to one random node in the pool... |
def p_tag_ref(self, p):
'tag_ref : ID'
p[0] = AstTagRef(self.path, p.lineno(1), p.lexpos(1), p[1]) | tag_ref : ID |
def migrate(uri: str, archive_uri: str, case_id: str, dry: bool, force: bool):
"""Update all information that was manually annotated from a old instance."""
scout_client = MongoClient(uri)
scout_database = scout_client[uri.rsplit('/', 1)[-1]]
scout_adapter = MongoAdapter(database=scout_database)
sco... | Update all information that was manually annotated from a old instance. |
def handle_bind_iq_set(self, stanza):
"""Handler <iq type="set"/> for resource binding."""
# pylint: disable-msg=R0201
if not self.stream:
logger.error("Got bind stanza before stream feature has been set")
return False
if self.stream.initiator:
return ... | Handler <iq type="set"/> for resource binding. |
def get_layer_heights(heights, depth, *args, **kwargs):
"""Return an atmospheric layer from upper air data with the requested bottom and depth.
This function will subset an upper air dataset to contain only the specified layer using
the heights only.
Parameters
----------
heights : array-like
... | Return an atmospheric layer from upper air data with the requested bottom and depth.
This function will subset an upper air dataset to contain only the specified layer using
the heights only.
Parameters
----------
heights : array-like
Atmospheric heights
depth : `pint.Quantity`
... |
def list_messages(self):
"""Output full messages list documentation in ReST format. """
messages = sorted(self._messages_definitions.values(), key=lambda m: m.msgid)
for message in messages:
if not message.may_be_emitted():
continue
print(message.format_he... | Output full messages list documentation in ReST format. |
def fastqIterator(fn, verbose=False, allowNameMissmatch=False):
"""
A generator function which yields FastqSequence objects read from a file or
stream. This is a general function which wraps fastqIteratorSimple. In
future releases, we may allow dynamic switching of which base iterator is
used.
:p... | A generator function which yields FastqSequence objects read from a file or
stream. This is a general function which wraps fastqIteratorSimple. In
future releases, we may allow dynamic switching of which base iterator is
used.
:param fn: A file-like stream or a string; if this is a
... |
def curve_points(self, beginframe, endframe, framestep, birthframe, startframe, stopframe, deathframe,
filternone=True, noiseframe=None):
"""
returns a list of frames from startframe to stopframe, in steps of framestepj
warning: the list of points may include "None" elements... | returns a list of frames from startframe to stopframe, in steps of framestepj
warning: the list of points may include "None" elements
:param beginframe: first frame to include in list of points
:param endframe: last frame to include in list of points
:param framestep: framestep
... |
def zip(self, destination: typing.Union[str, Path] = None, encode: bool = True) -> str:
"""
Write mission, dictionary etc. to a MIZ file
Args:
destination: target MIZ file (if none, defaults to source MIZ + "_EMIZ"
Returns: destination file
"""
if encode:
... | Write mission, dictionary etc. to a MIZ file
Args:
destination: target MIZ file (if none, defaults to source MIZ + "_EMIZ"
Returns: destination file |
def _loadFromHStream(self, dtype: HStream, bitAddr: int) -> int:
"""
Parse HUnion type to this transaction template instance
:return: address of it's end
"""
ch = TransTmpl(dtype.elmType, 0, parent=self, origin=self.origin)
self.children.append(ch)
return bitAddr... | Parse HUnion type to this transaction template instance
:return: address of it's end |
def add_to_manifest(self, manifest):
"""
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry... | Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app. |
def create_checklist_item(self, card_id, checklist_id, checklistitem_json, **kwargs):
'''
Create a ChecklistItem object from JSON object
'''
return self.client.create_checklist_item(card_id, checklist_id, checklistitem_json, **kwargs) | Create a ChecklistItem object from JSON object |
def iteritems(self, **options):
'''Return a query interator with (id, object) pairs.'''
iter = self.query(**options)
while True:
obj = iter.next()
yield (obj.id, obj) | Return a query interator with (id, object) pairs. |
def getServiceDependenciesUIDs(self):
"""
This methods returns a list with the service dependencies UIDs
:return: a list of uids
"""
deps = self.getServiceDependencies()
deps_uids = [service.UID() for service in deps]
return deps_uids | This methods returns a list with the service dependencies UIDs
:return: a list of uids |
def git_remote(self):
"""
If the distribution is installed via git, return the first URL of the
'origin' remote if one is configured for the repo, or else the first
URL of the lexicographically-first remote, or else None.
:return: origin or first remote URL
:rtype: :py:o... | If the distribution is installed via git, return the first URL of the
'origin' remote if one is configured for the repo, or else the first
URL of the lexicographically-first remote, or else None.
:return: origin or first remote URL
:rtype: :py:obj:`str` or :py:data:`None` |
def _search(self, mdb, query, filename, season_num, episode_num, auto=False):
""" Search the movie using all available datasources and let the user
select a result. Return the choosen datasource and produced movie dict.
If auto is enabled, directly returns the first movie found.
"""... | Search the movie using all available datasources and let the user
select a result. Return the choosen datasource and produced movie dict.
If auto is enabled, directly returns the first movie found. |
def config(conf, confdefs):
'''
Initialize a config dict using the given confdef tuples.
'''
conf = conf.copy()
# for now just populate defval
for name, info in confdefs:
conf.setdefault(name, info.get('defval'))
return conf | Initialize a config dict using the given confdef tuples. |
def hierarchical_match(d, k, default=None):
"""
Match a key against a dict, simplifying element at a time
:param df: DataFrame
:type df: pandas.DataFrame
:param level: Level of DataFrame index to extract IDs from
:type level: int or str
:return: hiearchically matched value or default
"... | Match a key against a dict, simplifying element at a time
:param df: DataFrame
:type df: pandas.DataFrame
:param level: Level of DataFrame index to extract IDs from
:type level: int or str
:return: hiearchically matched value or default |
def add_clause(self, clause, soft=False):
"""
The method for adding a new hard of soft clause to the problem
formula. Although the input formula is to be specified as an
argument of the constructor of :class:`LBX`, adding clauses may be
helpful when *enumerating* ... | The method for adding a new hard of soft clause to the problem
formula. Although the input formula is to be specified as an
argument of the constructor of :class:`LBX`, adding clauses may be
helpful when *enumerating* MCSes of the formula. This way, the
clauses are added ... |
def Delete(self):
"""Delete this source restriction and commit change to cloud.
>>> clc.v2.Server("WA1BTDIX01").PublicIPs().public_ips[0].source_restrictions[0].Delete().WaitUntilComplete()
0
"""
self.public_ip.source_restrictions = [o for o in self.public_ip.source_restrictions if o!=self]
return(self.... | Delete this source restriction and commit change to cloud.
>>> clc.v2.Server("WA1BTDIX01").PublicIPs().public_ips[0].source_restrictions[0].Delete().WaitUntilComplete()
0 |
def annual_event_counts_card(kind='all', current_year=None):
"""
Displays years and the number of events per year.
kind is an Event kind (like 'cinema', 'gig', etc.) or 'all' (default).
current_year is an optional date object representing the year we're already
showing information about.
""... | Displays years and the number of events per year.
kind is an Event kind (like 'cinema', 'gig', etc.) or 'all' (default).
current_year is an optional date object representing the year we're already
showing information about. |
def add_random_tile(self):
"""Adds a random tile to the grid. Assumes that it has empty fields."""
x_pos, y_pos = np.where(self._state == 0)
assert len(x_pos) != 0
empty_index = np.random.choice(len(x_pos))
value = np.random.choice([1, 2], p=[0.9, 0.1])
self._state[x_po... | Adds a random tile to the grid. Assumes that it has empty fields. |
def add_pool_member(self, name, port, pool_name):
'''
Add a node to a pool
'''
if not self.check_pool(pool_name):
raise CommandExecutionError(
'{0} pool does not exists'.format(pool_name)
)
members_seq = self.bigIP.LocalLB.Pool.typefactory... | Add a node to a pool |
def connect(self, cback, subscribers=None, instance=None):
"""Add a function or a method as an handler of this signal.
Any handler added can be a coroutine.
:param cback: the callback (or *handler*) to be added to the set
:returns: ``None`` or the value returned by the corresponding wr... | Add a function or a method as an handler of this signal.
Any handler added can be a coroutine.
:param cback: the callback (or *handler*) to be added to the set
:returns: ``None`` or the value returned by the corresponding wrapper |
def getWmWindowType(self, win, str=False):
"""
Get the list of window types of the given window (property
_NET_WM_WINDOW_TYPE).
:param win: the window object
:param str: True to get a list of string types instead of int
:return: list of (int|str)
"""
type... | Get the list of window types of the given window (property
_NET_WM_WINDOW_TYPE).
:param win: the window object
:param str: True to get a list of string types instead of int
:return: list of (int|str) |
def map(self, func, *columns):
"""
Map a function to rows, or to given columns
"""
if not columns:
return map(func, self.rows)
else:
values = (self.values(column) for column in columns)
result = [map(func, v) for v in values]
if len... | Map a function to rows, or to given columns |
def reduce_dimensionality(self, data):
"""
Reduces the dimensionality of the provided Instance or Instances object.
:param data: the data to process
:type data: Instances
:return: the reduced dataset
:rtype: Instances
"""
if type(data) is Instance:
... | Reduces the dimensionality of the provided Instance or Instances object.
:param data: the data to process
:type data: Instances
:return: the reduced dataset
:rtype: Instances |
def read_end_of_message(self):
"""Read the b"\\r\\n" at the end of the message."""
read = self._file.read
last = read(1)
current = read(1)
while last != b'' and current != b'' and not \
(last == b'\r' and current == b'\n'):
last = current
c... | Read the b"\\r\\n" at the end of the message. |
def find_obfuscatables(tokens, obfunc, ignore_length=False):
"""
Iterates over *tokens*, which must be an equivalent output to what
tokenize.generate_tokens() produces, calling *obfunc* on each with the
following parameters:
- **tokens:** The current list of tokens.
- **index:** ... | Iterates over *tokens*, which must be an equivalent output to what
tokenize.generate_tokens() produces, calling *obfunc* on each with the
following parameters:
- **tokens:** The current list of tokens.
- **index:** The current position in the list.
*obfunc* is expected to return t... |
def size_container_folding(value):
"""
Convert value to ast expression if size is not too big.
Converter for sized container.
"""
if len(value) < MAX_LEN:
if isinstance(value, list):
return ast.List([to_ast(elt) for elt in value], ast.Load())
elif isinstance(value, tuple... | Convert value to ast expression if size is not too big.
Converter for sized container. |
def last_restapi_key_transformer(key, attr_desc, value):
"""A key transformer that returns the last RestAPI key.
:param str key: The attribute name
:param dict attr_desc: The attribute metadata
:param object value: The value
:returns: The last RestAPI key.
"""
key, value = full_restapi_key_... | A key transformer that returns the last RestAPI key.
:param str key: The attribute name
:param dict attr_desc: The attribute metadata
:param object value: The value
:returns: The last RestAPI key. |
def arange(start, stop=None, step=1.0, repeat=1, infer_range=None, ctx=None, dtype=mx_real_t):
"""Returns evenly spaced values within a given interval.
Values are generated within the half-open interval [`start`, `stop`). In other
words, the interval includes `start` but excludes `stop`. The function is
... | Returns evenly spaced values within a given interval.
Values are generated within the half-open interval [`start`, `stop`). In other
words, the interval includes `start` but excludes `stop`. The function is
similar to the built-in Python function `range` and to `numpy.arange`,
but returns an `NDArray`.... |
def readConfigFromJSON(self, fileName):
"""Read configuration from JSON.
:param fileName: path to the configuration file.
:type fileName: str.
"""
self.__logger.debug("readConfigFromJSON: reading from " + fileName)
with open(fileName) as data_file:
data = loa... | Read configuration from JSON.
:param fileName: path to the configuration file.
:type fileName: str. |
def _init_options(self, kwargs):
""" Initializes self.options """
self.options = self.task_config.options
if self.options is None:
self.options = {}
if kwargs:
self.options.update(kwargs)
# Handle dynamic lookup of project_config values via $project_confi... | Initializes self.options |
def do_output(self, *args):
"""Pass a command directly to the current output processor
"""
if args:
action, params = args[0], args[1:]
log.debug("Pass %s directly to output with %s", action, params)
function = getattr(self.output, "do_" + action, None)
... | Pass a command directly to the current output processor |
def preprocess_images(raw_color_im,
raw_depth_im,
camera_intr,
T_camera_world,
workspace_box,
workspace_im,
image_proc_config):
""" Preprocess a set of color and depth images. """
... | Preprocess a set of color and depth images. |
def _create_socket(self, socket_family):
"""Create Socket.
:param int socket_family:
:rtype: socket.socket
"""
sock = socket.socket(socket_family, socket.SOCK_STREAM, 0)
sock.settimeout(self._parameters['timeout'] or None)
if self.use_ssl:
if not comp... | Create Socket.
:param int socket_family:
:rtype: socket.socket |
def iter_format_block(
self, text=None,
width=60, chars=False, fill=False, newlines=False,
append=None, prepend=None, strip_first=False, strip_last=False,
lstrip=False):
""" Iterate over lines in a formatted block of text.
This iterator allows you to p... | Iterate over lines in a formatted block of text.
This iterator allows you to prepend to each line.
For basic blocks see iter_block().
Arguments:
text : String to format.
width : Maximum width for each line. The prepend string
... |
def stmt_lambdef_handle(self, original, loc, tokens):
"""Process multi-line lambdef statements."""
if len(tokens) == 2:
params, stmts = tokens
elif len(tokens) == 3:
params, stmts, last = tokens
if "tests" in tokens:
stmts = stmts.asList() + ["... | Process multi-line lambdef statements. |
def compute(self, inputs, outputs):
"""
Get the next record from the queue and outputs it.
"""
if len(self.queue) > 0:
# Take the top element of the data queue
data = self.queue.pop()
else:
raise Exception("RawValues: No data: queue is empty ")
# Copy data into output vectors
... | Get the next record from the queue and outputs it. |
def sim_levenshtein(src, tar, mode='lev', cost=(1, 1, 1, 1)):
"""Return the Levenshtein similarity of two strings.
This is a wrapper of :py:meth:`Levenshtein.sim`.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
mode : str... | Return the Levenshtein similarity of two strings.
This is a wrapper of :py:meth:`Levenshtein.sim`.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
mode : str
Specifies a mode for computing the Levenshtein distance:
... |
def extract_zip(zip_file_path):
"""
Returns:
dict: Dict[str, DataFrame]
"""
dfs = {}
with zipfile.ZipFile(zip_file_path, mode='r') as z_file:
names = z_file.namelist()
for name in names:
content = z_file.read(name)
_, tmp_file_path = tempfile.mkstemp()... | Returns:
dict: Dict[str, DataFrame] |
def get_comment(self, project, work_item_id, comment_id, include_deleted=None, expand=None):
"""GetComment.
[Preview API] Returns a work item comment.
:param str project: Project ID or project name
:param int work_item_id: Id of a work item to get the comment.
:param int comment_... | GetComment.
[Preview API] Returns a work item comment.
:param str project: Project ID or project name
:param int work_item_id: Id of a work item to get the comment.
:param int comment_id: Id of the comment to return.
:param bool include_deleted: Specify if the deleted comment sho... |
def _fold_line(self, line):
"""Write string line as one or more folded lines."""
if len(line) <= self._cols:
self._output_file.write(line)
self._output_file.write(self._line_sep)
else:
pos = self._cols
self._output_file.write(line[0:self._cols])
... | Write string line as one or more folded lines. |
def hotp(key, counter, digits=6):
"""
These test vectors come from RFC-4226
(https://tools.ietf.org/html/rfc4226#page-32).
>>> key = b'12345678901234567890'
>>> for c in range(10):
... hotp(key, c)
'755224'
'287082'
'359152'
'969429'
'338314'
'254676'
'287922'
... | These test vectors come from RFC-4226
(https://tools.ietf.org/html/rfc4226#page-32).
>>> key = b'12345678901234567890'
>>> for c in range(10):
... hotp(key, c)
'755224'
'287082'
'359152'
'969429'
'338314'
'254676'
'287922'
'162583'
'399871'
'520489' |
def parse_dict_header(value):
"""Parse lists of key, value pairs as described by RFC 2068 Section 2 and
convert them into a python dict:
>>> d = parse_dict_header('foo="is a fish", bar="as well"')
>>> type(d) is dict
True
>>> sorted(d.items())
[('bar', 'as well'), ('foo', 'is a fish')]
... | Parse lists of key, value pairs as described by RFC 2068 Section 2 and
convert them into a python dict:
>>> d = parse_dict_header('foo="is a fish", bar="as well"')
>>> type(d) is dict
True
>>> sorted(d.items())
[('bar', 'as well'), ('foo', 'is a fish')]
If there is no value for a key it wi... |
def as_dict(self):
"""
Returns the model as a dict
"""
if not self._is_valid:
self.validate()
from .converters import to_dict
return to_dict(self) | Returns the model as a dict |
def _zforce(self,R,z,phi=0.,t=0.):
"""
NAME:
zforce
PURPOSE:
evaluate vertical force K_z (R,z)
INPUT:
R - Cylindrical Galactocentric radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
K_z (R,z)
... | NAME:
zforce
PURPOSE:
evaluate vertical force K_z (R,z)
INPUT:
R - Cylindrical Galactocentric radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
K_z (R,z)
HISTORY:
2012-12-27 - Written - Bovy ... |
def reprioritize(self, stream_id,
depends_on=None, weight=16, exclusive=False):
"""
Update the priority status of an existing stream.
:param stream_id: The stream ID of the stream being updated.
:param depends_on: (optional) The ID of the stream that the stream now
... | Update the priority status of an existing stream.
:param stream_id: The stream ID of the stream being updated.
:param depends_on: (optional) The ID of the stream that the stream now
depends on. If ``None``, will be moved to depend on stream 0.
:param weight: (optional) The new weigh... |
def pyside_load_ui(uifile, base_instance=None):
"""Provide PyQt4.uic.loadUi functionality to PySide
Args:
uifile (str): Absolute path to .ui file
base_instance (QWidget): The widget into which UI widgets are loaded
Note:
pysideuic is required for this to work with PySide.
... | Provide PyQt4.uic.loadUi functionality to PySide
Args:
uifile (str): Absolute path to .ui file
base_instance (QWidget): The widget into which UI widgets are loaded
Note:
pysideuic is required for this to work with PySide.
This seems to work correctly in Maya as well as outsid... |
def _convert_to_dict(self, setting):
'''
Converts a settings file into a dictionary, ignoring python defaults
@param setting: A loaded setting module
'''
the_dict = {}
set = dir(setting)
for key in set:
if key in self.ignore:
continue
... | Converts a settings file into a dictionary, ignoring python defaults
@param setting: A loaded setting module |
def SearchFileNameTable(self, fileName):
"""
Search FileName table.
Find the show id for a given file name.
Parameters
----------
fileName : string
File name to look up in table.
Returns
----------
int or None
If a match is found in the database table the show ... | Search FileName table.
Find the show id for a given file name.
Parameters
----------
fileName : string
File name to look up in table.
Returns
----------
int or None
If a match is found in the database table the show id for this
entry is returned, otherwise this... |
def get_all_roles(path_prefix=None, region=None, key=None, keyid=None,
profile=None):
'''
Get and return all IAM role details, starting at the optional path.
.. versionadded:: 2016.3.0
CLI Example:
salt-call boto_iam.get_all_roles
'''
conn = _get_conn(region=region, k... | Get and return all IAM role details, starting at the optional path.
.. versionadded:: 2016.3.0
CLI Example:
salt-call boto_iam.get_all_roles |
def watchdog_handler(self):
"""Take care of threads if wachdog expires."""
_LOGGING.debug('%s Watchdog expired. Resetting connection.', self.name)
self.watchdog.stop()
self.reset_thrd.set() | Take care of threads if wachdog expires. |
def dropbox_factory(request):
""" expects the id of an existing dropbox and returns its instance"""
try:
return request.registry.settings['dropbox_container'].get_dropbox(request.matchdict['drop_id'])
except KeyError:
raise HTTPNotFound('no such dropbox') | expects the id of an existing dropbox and returns its instance |
def version_option(version=None, *param_decls, **attrs):
"""Adds a ``--version`` option which immediately ends the program
printing out the version number. This is implemented as an eager
option that prints the version and exits the program in the callback.
:param version: the version number to show. ... | Adds a ``--version`` option which immediately ends the program
printing out the version number. This is implemented as an eager
option that prints the version and exits the program in the callback.
:param version: the version number to show. If not provided Click
attempts an auto disc... |
def record_command(self, cmd, prg=''):
    """Write *cmd* — usually the name of the program or task being run —
    to the command log file, optionally tagged with *prg*."""
    cmd_text = force_to_string(cmd)
    self._log(self.logFileCommand, cmd_text, prg)
def train(self, x_data, y_data):
"""Trains model on inputs
:param x_data: x matrix
:param y_data: y array
"""
x_train, _, y_train, _ = train_test_split(
x_data,
y_data,
test_size=0.67,
random_state=None
) # cross-split
... | Trains model on inputs
:param x_data: x matrix
:param y_data: y array |
def log_det_jacobian(self, inputs):
"""Returns log det | dx / dy | = num_events * sum log | scale |."""
del inputs # unused
# Number of events is number of all elements excluding the batch and
# channel dimensions.
num_events = tf.reduce_prod(tf.shape(inputs)[1:-1])
log_det_jacobian = num_event... | Returns log det | dx / dy | = num_events * sum log | scale |. |
def ping(host, timeout=False, return_boolean=False):
'''
Performs an ICMP ping to a host
.. versionchanged:: 2015.8.0
Added support for SunOS
CLI Example:
.. code-block:: bash
salt '*' network.ping archlinux.org
.. versionadded:: 2015.5.0
Return a True or False instead ... | Performs an ICMP ping to a host
.. versionchanged:: 2015.8.0
Added support for SunOS
CLI Example:
.. code-block:: bash
salt '*' network.ping archlinux.org
.. versionadded:: 2015.5.0
Return a True or False instead of ping output.
.. code-block:: bash
salt '*' netwo... |
def set_exception(self, exception, override=False):
"""Set an exception for the TransferFuture
Implies the TransferFuture failed.
:param exception: The exception that cause the transfer to fail.
:param override: If True, override any existing state.
"""
with self._lock:... | Set an exception for the TransferFuture
Implies the TransferFuture failed.
:param exception: The exception that cause the transfer to fail.
:param override: If True, override any existing state. |
def download_uniprot_file(uniprot_id, filetype, outdir='', force_rerun=False):
"""Download a UniProt file for a UniProt ID/ACC
Args:
uniprot_id: Valid UniProt ID
filetype: txt, fasta, xml, rdf, or gff
outdir: Directory to download the file
Returns:
str: Absolute path to fil... | Download a UniProt file for a UniProt ID/ACC
Args:
uniprot_id: Valid UniProt ID
filetype: txt, fasta, xml, rdf, or gff
outdir: Directory to download the file
Returns:
str: Absolute path to file |
def _create_glance_db(self, root_db_pass, glance_db_pass):
"""Create the glance database"""
print red(env.host_string + ' | Create glance database')
sudo(
"mysql -uroot -p{0} -e \"CREATE DATABASE glance;\"".format(root_db_pass), shell=False)
sudo("mysql -uroot -p{0} -e \... | Create the glance database |
def server_deployment_mode(command, parser, cluster, cl_args):
'''
check the server deployment mode for the given cluster
if it is valid return the valid set of args
:param cluster:
:param cl_args:
:return:
'''
# Read the cluster definition, if not found
client_confs = cdefs.read_server_mode_cluster_d... | check the server deployment mode for the given cluster
if it is valid return the valid set of args
:param cluster:
:param cl_args:
:return: |
def _pdb_frame(self):
"""Return current Pdb frame if there is any"""
if self._pdb_obj is not None and self._pdb_obj.curframe is not None:
return self._pdb_obj.curframe | Return current Pdb frame if there is any |
def get_distance_matrix(self):
"""
Compute and return distances between each pairs of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time... | Compute and return distances between each pairs of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use ... |
def plot(self, data=None, **kwargs):
"""
Plot the data
Parameters
----------
data : numpy array, pandas dataframe or list of arrays/dfs
The data to plot. If no data is passed, the xform_data from
the DataGeometry object will be returned.
kwargs ... | Plot the data
Parameters
----------
data : numpy array, pandas dataframe or list of arrays/dfs
The data to plot. If no data is passed, the xform_data from
the DataGeometry object will be returned.
kwargs : keyword arguments
Any keyword arguments sup... |
def retrieve_console_log(self, filename=None, dir=None):
"""Retrieves the application console log (standard out and error)
files for this PE and saves them as a plain text file.
An existing file with the same name will be overwritten.
Args:
filename (str): name of the creat... | Retrieves the application console log (standard out and error)
files for this PE and saves them as a plain text file.
An existing file with the same name will be overwritten.
Args:
filename (str): name of the created file. Defaults to `pe_<id>_<timestamp>.stdouterr` where `id` is t... |
def kill_tweens(self, obj = None):
"""Stop tweening an object, without completing the motion or firing the
on_complete"""
if obj is not None:
try:
del self.current_tweens[obj]
except:
pass
else:
self.current_tweens = col... | Stop tweening an object, without completing the motion or firing the
on_complete |
def create_gce_image(zone,
project,
instance_name,
name,
description):
"""
Shuts down the instance and creates and image from the disk.
Assumes that the disk name is the same as the instance_name (this is the
default be... | Shuts down the instance and creates and image from the disk.
Assumes that the disk name is the same as the instance_name (this is the
default behavior for boot disks on GCE). |
def _nodes(self):
"""
Returns the list of nodes present in the network
Examples
--------
>>> from pgmpy.models import DynamicBayesianNetwork as DBN
>>> dbn = DBN()
>>> dbn.add_nodes_from(['A', 'B', 'C'])
>>> sorted(dbn._nodes())
['B', 'A', 'C']
... | Returns the list of nodes present in the network
Examples
--------
>>> from pgmpy.models import DynamicBayesianNetwork as DBN
>>> dbn = DBN()
>>> dbn.add_nodes_from(['A', 'B', 'C'])
>>> sorted(dbn._nodes())
['B', 'A', 'C'] |
def get_port_profile_status_input_port_profile_status(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_port_profile_status = ET.Element("get_port_profile_status")
config = get_port_profile_status
input = ET.SubElement(get_port_profile_status, ... | Auto Generated Code |
def initialiseDevice(self):
"""
performs initialisation of the device
:param batchSize: the no of samples that each provideData call should yield
:return:
"""
logger.debug("Initialising device")
self.getInterruptStatus()
self.setAccelerometerSensitivity(se... | performs initialisation of the device
:param batchSize: the no of samples that each provideData call should yield
:return: |
def compute_header_hmac_hash(context):
"""Compute HMAC-SHA256 hash of header.
Used to prevent header tampering."""
return hmac.new(
hashlib.sha512(
b'\xff' * 8 +
hashlib.sha512(
context._.header.value.dynamic_header.master_seed.data +
context.... | Compute HMAC-SHA256 hash of header.
Used to prevent header tampering. |
def parse_description():
"""
Parse the description in the README file
pandoc --from=markdown --to=rst --output=README.rst README.md
CommandLine:
python -c "import setup; print(setup.parse_description())"
"""
from os.path import dirname, join, exists
readme_fpath = join(dirname(__fi... | Parse the description in the README file
pandoc --from=markdown --to=rst --output=README.rst README.md
CommandLine:
python -c "import setup; print(setup.parse_description())" |
def ekbseg(handle, tabnam, cnames, decls):
"""
Start a new segment in an E-kernel.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekbseg_c.html
:param handle: File handle.
:type handle: int
:param tabnam: Table name.
:type tabnam: str
:param cnames: Names of columns.
:type... | Start a new segment in an E-kernel.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekbseg_c.html
:param handle: File handle.
:type handle: int
:param tabnam: Table name.
:type tabnam: str
:param cnames: Names of columns.
:type cnames: list of str.
:param decls: Declarations of... |
def _to_ctfile(self):
"""Convert :class:`~ctfile.ctfile.CTfile` into `CTfile` formatted string.
:return: ``CTfile`` formatted string.
:rtype: :py:class:`str`.
"""
output = io.StringIO()
for key in self:
if key == 'HeaderBlock':
for line in se... | Convert :class:`~ctfile.ctfile.CTfile` into `CTfile` formatted string.
:return: ``CTfile`` formatted string.
:rtype: :py:class:`str`. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.