code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def deserialize(self, data, columns=None):
"""
Deserializes SON to a DataFrame
Parameters
----------
data: SON data
columns: None, or list of strings
optionally you can deserialize a subset of the data in the SON. Index
columns are ALWAYS deserial... | Deserializes SON to a DataFrame
Parameters
----------
data: SON data
columns: None, or list of strings
optionally you can deserialize a subset of the data in the SON. Index
columns are ALWAYS deserialized, and should not be specified
Returns
----... |
def __buildDomainRanges(self, aProp):
"""
extract domain/range details and add to Python objects
"""
domains = chain(aProp.rdflib_graph.objects(
None, rdflib.term.URIRef(u'http://schema.org/domainIncludes')), aProp.rdflib_graph.objects(
None, rdflib.RDFS.domain))... | extract domain/range details and add to Python objects |
def force_bytes(s, encoding='utf-8', errors='strict'):
"""A function turns "s" into bytes object, similar to django.utils.encoding.force_bytes
"""
# Handle the common case first for performance reasons.
if isinstance(s, bytes):
if encoding == 'utf-8':
return s
else:
... | A function turns "s" into bytes object, similar to django.utils.encoding.force_bytes |
def schema(ctx, schema):
""" Load schema definitions from a YAML file. """
data = yaml.load(schema)
if not isinstance(data, (list, tuple)):
data = [data]
with click.progressbar(data, label=schema.name) as bar:
for schema in bar:
ctx.obj['grano'].schemata.upsert(schema) | Load schema definitions from a YAML file. |
def check_profile_id(self, profile_name: str) -> Profile:
"""
Consult locally stored ID of profile with given name, check whether ID matches and whether name
has changed and return current name of the profile, and store ID of profile.
:param profile_name: Profile name
:return: I... | Consult locally stored ID of profile with given name, check whether ID matches and whether name
has changed and return current name of the profile, and store ID of profile.
:param profile_name: Profile name
:return: Instance of current profile |
def copyPropList(self, cur):
"""Do a copy of an attribute list. """
if cur is None: cur__o = None
else: cur__o = cur._o
ret = libxml2mod.xmlCopyPropList(self._o, cur__o)
if ret is None:raise treeError('xmlCopyPropList() failed')
__tmp = xmlAttr(_obj=ret)
return __... | Do a copy of an attribute list. |
def associate_failure_node(self, parent, child=None, **kwargs):
"""Add a node to run on failure.
=====API DOCS=====
Add a node to run on failure.
:param parent: Primary key of parent node to associate failure node to.
:type parent: int
:param child: Primary key of child... | Add a node to run on failure.
=====API DOCS=====
Add a node to run on failure.
:param parent: Primary key of parent node to associate failure node to.
:type parent: int
:param child: Primary key of child node to be associated.
:type child: int
:param `**kwargs`:... |
def spl_json(self):
"""Private method. May be removed at any time."""
_splj = {}
_splj["type"] = self._type
_splj["value"] = self._value
return _splj | Private method. May be removed at any time. |
def delete_dependency(self, from_task_name, to_task_name):
""" Delete a dependency between two tasks. """
logger.debug('Deleting dependency from {0} to {1}'.format(from_task_name, to_task_name))
if not self.state.allow_change_graph:
raise DagobahError("job's graph is immutable in it... | Delete a dependency between two tasks. |
def cached_value(self, source_file, configuration):
"""Return the cached declarations or None.
:param source_file: Header file name
:type source_file: str
:param configuration: Configuration object
:type configuration: :class:`parser.xml_generator_configuration_t`
:rtype... | Return the cached declarations or None.
:param source_file: Header file name
:type source_file: str
:param configuration: Configuration object
:type configuration: :class:`parser.xml_generator_configuration_t`
:rtype: Cached declarations or None |
def restore(self, fade=False):
"""Restore the state of a device to that which was previously saved.
For coordinator devices restore everything. For slave devices
only restore volume etc., not transport info (transport info
comes from the slave's coordinator).
Args:
... | Restore the state of a device to that which was previously saved.
For coordinator devices restore everything. For slave devices
only restore volume etc., not transport info (transport info
comes from the slave's coordinator).
Args:
fade (bool): Whether volume should be fade... |
def decode_chain_list(in_bytes):
"""Convert a list of bytes to a list of strings. Each string is of length mmtf.CHAIN_LEN
:param in_bytes: the input bytes
:return the decoded list of strings"""
tot_strings = len(in_bytes) // mmtf.utils.constants.CHAIN_LEN
out_strings = []
for i in range(tot_str... | Convert a list of bytes to a list of strings. Each string is of length mmtf.CHAIN_LEN
:param in_bytes: the input bytes
:return the decoded list of strings |
def fit_model(ts, sc=None):
"""
Fits an AR(1) + GARCH(1, 1) model to the given time series.
Parameters
----------
ts:
the time series to which we want to fit a AR+GARCH model as a Numpy array
Returns an ARGARCH model
"""
assert sc != None, "Missing SparkContext"
... | Fits an AR(1) + GARCH(1, 1) model to the given time series.
Parameters
----------
ts:
the time series to which we want to fit a AR+GARCH model as a Numpy array
Returns an ARGARCH model |
def create_producer(self):
"""Context manager that yields an instance of ``Producer``."""
with self.connection_pool.acquire(block=True) as conn:
yield self.producer(conn) | Context manager that yields an instance of ``Producer``. |
def install_vendored(cls, prefix, root=None, expose=None):
"""Install an importer for all vendored code with the given import prefix.
All distributions listed in ``expose`` will also be made available for import in direct,
un-prefixed form.
:param str prefix: The import prefix the installed importer w... | Install an importer for all vendored code with the given import prefix.
All distributions listed in ``expose`` will also be made available for import in direct,
un-prefixed form.
:param str prefix: The import prefix the installed importer will be responsible for.
:param str root: The root path of the ... |
def reference_preprocessing(job, config):
"""
Creates a genome fasta index and sequence dictionary file if not already present in the pipeline config.
:param JobFunctionWrappingJob job: passed automatically by Toil
:param Namespace config: Pipeline configuration options and shared files.
... | Creates a genome fasta index and sequence dictionary file if not already present in the pipeline config.
:param JobFunctionWrappingJob job: passed automatically by Toil
:param Namespace config: Pipeline configuration options and shared files.
Requires FileStoreID for genome fasta f... |
def remove(name, path):
'''
Removes installed alternative for defined <name> and <path>
or fallback to default alternative, if some defined before.
name
is the master name for this link group
(e.g. pager)
path
is the location of one of the alternative target files.
... | Removes installed alternative for defined <name> and <path>
or fallback to default alternative, if some defined before.
name
is the master name for this link group
(e.g. pager)
path
is the location of one of the alternative target files.
(e.g. /usr/bin/less) |
def _call_function(name, returner=None, **kwargs):
'''
Calls a function from the specified module.
:param name:
:param kwargs:
:return:
'''
argspec = salt.utils.args.get_function_argspec(__salt__[name])
# func_kw is initialized to a dictionary of keyword arguments the function to be ru... | Calls a function from the specified module.
:param name:
:param kwargs:
:return: |
def show_history(self, status=None, nids=None, full_history=False, metadata=False):
"""
Print the history of the flow to stdout.
Args:
status: if not None, only the tasks with this status are select
full_history: Print full info set, including nodes with an empty history... | Print the history of the flow to stdout.
Args:
status: if not None, only the tasks with this status are select
full_history: Print full info set, including nodes with an empty history.
nids: optional list of node identifiers used to filter the tasks.
metadata: pr... |
def unpack_response(file_information_class, buffer):
"""
Pass in the buffer value from the response object to unpack it and
return a list of query response structures for the request.
:param buffer: The raw bytes value of the SMB2QueryDirectoryResponse
buffer field.
... | Pass in the buffer value from the response object to unpack it and
return a list of query response structures for the request.
:param buffer: The raw bytes value of the SMB2QueryDirectoryResponse
buffer field.
:return: List of query_info.* structures based on the
FileInf... |
def _float(text):
"""Fonction to convert the 'decimal point assumed' format of TLE to actual
float
>>> _float('0000+0')
0.0
>>> _float('+0000+0')
0.0
>>> _float('34473-3')
0.00034473
>>> _float('-60129-4')
-6.0129e-05
>>> _float('+45871-4')
4.5871e-05
"""
text ... | Fonction to convert the 'decimal point assumed' format of TLE to actual
float
>>> _float('0000+0')
0.0
>>> _float('+0000+0')
0.0
>>> _float('34473-3')
0.00034473
>>> _float('-60129-4')
-6.0129e-05
>>> _float('+45871-4')
4.5871e-05 |
def sym(self, nested_scope=None):
"""Return the correspond symbolic number."""
operation = self.children[0].operation()
expr = self.children[1].sym(nested_scope)
return operation(expr) | Return the correspond symbolic number. |
def dns_get_conf(self, domainName, environment):
"""
Returns the existing domain configuration and token from the ADNS
"""
response = self.client.service.dns_get_conf(domainName, environment)
dns_config = CotendoDNS(response)
return dns_config | Returns the existing domain configuration and token from the ADNS |
def validate(self, value, model=None, context=None):
"""
Validate
Perform value validation and return result
:param value: value to check
:param model: parent model being validated
:param context: object or None, validation context
:re... | Validate
Perform value validation and return result
:param value: value to check
:param model: parent model being validated
:param context: object or None, validation context
:return: shiftschema.results.SimpleResult |
def setProduct(self, cache=False, *args, **kwargs):
"""Adds the product for this loan to a 'product' field.
Product is a MambuProduct object.
cache argument allows to use AllMambuProducts singleton to
retrieve the products. See mambuproduct.AllMambuProducts code
and pydoc for f... | Adds the product for this loan to a 'product' field.
Product is a MambuProduct object.
cache argument allows to use AllMambuProducts singleton to
retrieve the products. See mambuproduct.AllMambuProducts code
and pydoc for further information.
Returns the number of requests don... |
def _edit_tags(self, tag, items, locked=True, remove=False):
""" Helper to edit and refresh a tags.
Parameters:
tag (str): tag name
items (list): list of tags to add
locked (bool): lock this field.
remove (bool): If this is active remo... | Helper to edit and refresh a tags.
Parameters:
tag (str): tag name
items (list): list of tags to add
locked (bool): lock this field.
remove (bool): If this is active remove the tags in items. |
def add_edge(self, source, target):
"""Returns a new edge connecting source and target vertices.
Args:
source: The source Vertex.
target: The target Vertex.
Returns:
A new Edge linking source to target.
"""
edge = Edge(len(self.edges))
self.edges.append(edge)
source.out_e... | Returns a new edge connecting source and target vertices.
Args:
source: The source Vertex.
target: The target Vertex.
Returns:
A new Edge linking source to target. |
def encrypt(api_context, request_bytes, custom_headers):
"""
:type api_context: bunq.sdk.context.ApiContext
:type request_bytes: bytes
:type custom_headers: dict[str, str]
:rtype: bytes
"""
key = Random.get_random_bytes(_AES_KEY_SIZE)
iv = Random.get_random_bytes(_BLOCK_SIZE)
_add_... | :type api_context: bunq.sdk.context.ApiContext
:type request_bytes: bytes
:type custom_headers: dict[str, str]
:rtype: bytes |
def absorb(self, trits, offset=0, length=None):
# type: (Sequence[int], Optional[int], Optional[int]) -> None
"""
Absorb trits into the sponge.
:param trits:
Sequence of trits to absorb.
:param offset:
Starting offset in ``trits``.
:param length... | Absorb trits into the sponge.
:param trits:
Sequence of trits to absorb.
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to absorb. Defaults to ``len(trits)``. |
def check_api_key(request, key, hproPk):
"""Check if an API key is valid"""
if settings.PIAPI_STANDALONE:
return True
(_, _, hproject) = getPlugItObject(hproPk)
if not hproject:
return False
if hproject.plugItApiKey is None or hproject.plugItApiKey == '':
return False
... | Check if an API key is valid |
def create(self, project, title, href, **attrs):
"""
Create a new :class:`WikiLink`
:param project: :class:`Project` id
:param title: title of the wiki link
:param href: href for the wiki link
:param attrs: optional attributes for the :class:`WikiLink`
"""
... | Create a new :class:`WikiLink`
:param project: :class:`Project` id
:param title: title of the wiki link
:param href: href for the wiki link
:param attrs: optional attributes for the :class:`WikiLink` |
def s2p(self):
"""Return 2 proton separation energy"""
M_P = 7.28897050 # proton mass excess in MeV
f = lambda parent, daugther: -parent + daugther + 2 * M_P
return self.derived('s2p', (-2, 0), f) | Return 2 proton separation energy |
def convertDict2Attrs(self, *args, **kwargs):
"""The trick for iterable Mambu Objects comes here:
You iterate over each element of the responded List from Mambu,
and create a Mambu Loan (or your own itemclass) object for each
one, initializing them one at a time, and changing the attrs
... | The trick for iterable Mambu Objects comes here:
You iterate over each element of the responded List from Mambu,
and create a Mambu Loan (or your own itemclass) object for each
one, initializing them one at a time, and changing the attrs
attribute (which just holds a list of plain dicti... |
def intersect(self, other):
"""
Makes a striplog of all intersections.
Args:
Striplog. The striplog instance to intersect with.
Returns:
Striplog. The result of the intersection.
"""
if not isinstance(other, self.__class__):
m = "You ... | Makes a striplog of all intersections.
Args:
Striplog. The striplog instance to intersect with.
Returns:
Striplog. The result of the intersection. |
def fetchMore(self, index):
'''Fetch additional data under *index*.'''
sourceModel = self.sourceModel()
if not sourceModel:
return False
return sourceModel.fetchMore(self.mapToSource(index)) | Fetch additional data under *index*. |
def parse_response(response):
"""
parse response and return a dictionary if the content type.
is json/application.
:param response: HTTPRequest
:return dictionary for json content type otherwise response body
"""
if response.headers.get('Content-Type', JSON_TYPE).startswith(JSON_TYPE):
... | parse response and return a dictionary if the content type.
is json/application.
:param response: HTTPRequest
:return dictionary for json content type otherwise response body |
def rm_r(sftp, path):
"""Recursively delete contents of path
https://stackoverflow.com/a/23256181
"""
files = sftp.listdir(path)
for f in files:
filepath = os.path.join(path, f)
logger.info('Deleting: %s' % (filepath))
try:
sftp.remove(filepath)
except IOE... | Recursively delete contents of path
https://stackoverflow.com/a/23256181 |
def shell(command, *args):
'''Pass a command into the shell.'''
if args:
command = command.format(*args)
print LOCALE['shell'].format(command)
try:
return subprocess.check_output(command, shell=True)
except subprocess.CalledProcessError, ex:
return ex | Pass a command into the shell. |
def get_typecast(self):
"""Returns the typecast or ``None`` of this object as a string."""
midx, marker = self.token_next_by(m=(T.Punctuation, '::'))
nidx, next_ = self.token_next(midx, skip_ws=False)
return next_.value if next_ else None | Returns the typecast or ``None`` of this object as a string. |
def cumprod_to_tensor_axis(self, cumprod):
"""Maximum tensor axis i such that self.cumprod[i] == cumprod, or None."""
try:
return len(self) - 1 - self.cumprod[::-1].index(cumprod)
except ValueError:
return None | Maximum tensor axis i such that self.cumprod[i] == cumprod, or None. |
def get_file_info(hash, context=None):
"""Returns information about the file, identified by ``hash``.
If the `context` (an ident-hash) is supplied,
the information returned will be specific to that context.
"""
if context is None:
stmt = _get_sql('get-file-info.sql')
args = dict(has... | Returns information about the file, identified by ``hash``.
If the `context` (an ident-hash) is supplied,
the information returned will be specific to that context. |
def load_xml(self, xmlfile, **kwargs):
"""Load sources from an XML file."""
extdir = kwargs.get('extdir', self.extdir)
coordsys = kwargs.get('coordsys', 'CEL')
if not os.path.isfile(xmlfile):
xmlfile = os.path.join(fermipy.PACKAGE_DATA, 'catalogs', xmlfile)
root = E... | Load sources from an XML file. |
def build_kernel_to_data(self, Y, knn=None, bandwidth=None,
bandwidth_scale=None):
"""Build a kernel from new input data `Y` to the `self.data`
Parameters
----------
Y: array-like, [n_samples_y, n_features]
new data for which an affinity matrix ... | Build a kernel from new input data `Y` to the `self.data`
Parameters
----------
Y: array-like, [n_samples_y, n_features]
new data for which an affinity matrix is calculated
to the existing data. `n_features` must match
either the ambient or PCA dimensions
... |
def isPartitionMarkedForEvent(self, db_name, tbl_name, part_vals, eventType):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- eventType
"""
self.send_isPartitionMarkedForEvent(db_name, tbl_name, part_vals, eventType)
return self.recv_isPartitionMarkedForEvent() | Parameters:
- db_name
- tbl_name
- part_vals
- eventType |
def create_client_from_env(username=None,
api_key=None,
endpoint_url=None,
timeout=None,
auth=None,
config_file=None,
proxy=None,
u... | Creates a SoftLayer API client using your environment.
Settings are loaded via keyword arguments, environemtal variables and
config file.
:param username: an optional API username if you wish to bypass the
package's built-in username
:param api_key: an optional API key if you wish to bypass th... |
def splice(self, mark, newdata):
"""Replace the data after the marked location with the specified data."""
self.jump_to(mark)
self._data = self._data[:self._offset] + bytearray(newdata) | Replace the data after the marked location with the specified data. |
def _verify_run(out, cmd=None):
'''
Crash to the log if command execution was not successful.
'''
if out.get('retcode', 0) and out['stderr']:
if cmd:
log.debug('Command: \'%s\'', cmd)
log.debug('Return code: %s', out.get('retcode'))
log.debug('Error output:\n%s', out... | Crash to the log if command execution was not successful. |
def boolmask(indices, maxval=None):
"""
Constructs a list of booleans where an item is True if its position is in
`indices` otherwise it is False.
Args:
indices (list): list of integer indices
maxval (int): length of the returned list. If not specified
this is inferred from... | Constructs a list of booleans where an item is True if its position is in
`indices` otherwise it is False.
Args:
indices (list): list of integer indices
maxval (int): length of the returned list. If not specified
this is inferred from `indices`
Note:
In the future the ... |
def build_image_list(config, image, imagefile, all_local, include_allanchore, dockerfile=None, exclude_file=None):
"""Given option inputs from the cli, construct a list of image ids. Includes all found with no exclusion logic"""
if not image and not (imagefile or all_local):
raise click.BadOptionUsage(... | Given option inputs from the cli, construct a list of image ids. Includes all found with no exclusion logic |
def get_bool(self, key, default=UndefinedKey):
"""Return boolean representation of value found at key
:param key: key to use (dot separated). E.g., a.b.c
:type key: basestring
:param default: default value if key not found
:type default: bool
:return: boolean value
... | Return boolean representation of value found at key
:param key: key to use (dot separated). E.g., a.b.c
:type key: basestring
:param default: default value if key not found
:type default: bool
:return: boolean value
:type return: bool |
def get_n_cluster_per_event_hist(cluster_table):
'''Calculates the number of cluster in every event.
Parameters
----------
cluster_table : pytables.table
Returns
-------
numpy.Histogram
'''
logging.info("Histogram number of cluster per event")
cluster_in_events = analysis_utils... | Calculates the number of cluster in every event.
Parameters
----------
cluster_table : pytables.table
Returns
-------
numpy.Histogram |
def integer(self, x):
"""
returns a plain integer
"""
if type(x) is str:
hex = binascii.unhexlify(x)
return int.from_bytes(hex, 'big')
return x.value if isinstance(x, FiniteField.Value) else x | returns a plain integer |
def get_profile(self, ann_el_demand_per_sector):
""" Get the profiles for the given annual demand
Parameters
----------
ann_el_demand_per_sector : dictionary
Key: sector, value: annual value
Returns
-------
pandas.DataFrame : Table with all profiles
... | Get the profiles for the given annual demand
Parameters
----------
ann_el_demand_per_sector : dictionary
Key: sector, value: annual value
Returns
-------
pandas.DataFrame : Table with all profiles |
def iob2json(input_data, n_sents=10, *args, **kwargs):
"""
Convert IOB files into JSON format for use with train cli.
"""
docs = []
for group in minibatch(docs, n_sents):
group = list(group)
first = group.pop(0)
to_extend = first["paragraphs"][0]["sentences"]
for sent... | Convert IOB files into JSON format for use with train cli. |
def getmember(self, name):
"""Return a TarInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version.
"""
tarinfo... | Return a TarInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version. |
def state(self, abbr: bool = False) -> str:
"""Get a random administrative district of country.
:param abbr: Return ISO 3166-2 code.
:return: Administrative district.
"""
return self.random.choice(
self._data['state']['abbr' if abbr else 'name']) | Get a random administrative district of country.
:param abbr: Return ISO 3166-2 code.
:return: Administrative district. |
def write_min_max(self, file):
""" Writes minimum and maximum values to a table.
"""
report = CaseReport(self.case)
col1_header = "Attribute"
col1_width = 19
col2_header = "Minimum"
col3_header = "Maximum"
col_width = 22
sep = "="*col1_width +... | Writes minimum and maximum values to a table. |
def langids(self):
""" Return the USB device's supported language ID codes.
These are 16-bit codes familiar to Windows developers, where for
example instead of en-US you say 0x0409. USB_LANGIDS.pdf on the usb.org
developer site for more info. String requests using a LANGID not
i... | Return the USB device's supported language ID codes.
These are 16-bit codes familiar to Windows developers, where for
example instead of en-US you say 0x0409. USB_LANGIDS.pdf on the usb.org
developer site for more info. String requests using a LANGID not
in this array should not be sent... |
def _serialize_json(obj, fp):
""" Serialize ``obj`` as a JSON formatted stream to ``fp`` """
json.dump(obj, fp, indent=4, default=serialize) | Serialize ``obj`` as a JSON formatted stream to ``fp`` |
def legacy_signature(**kwargs_mapping):
"""
This decorator makes it possible to call a function using old argument names
when they are passed as keyword arguments.
@legacy_signature(old_arg1='arg1', old_arg2='arg2')
def func(arg1, arg2=1):
return arg1 + arg2
func(old_arg1=1) == 2
f... | This decorator makes it possible to call a function using old argument names
when they are passed as keyword arguments.
@legacy_signature(old_arg1='arg1', old_arg2='arg2')
def func(arg1, arg2=1):
return arg1 + arg2
func(old_arg1=1) == 2
func(old_arg1=1, old_arg2=2) == 3 |
def get_versions():
"""Return the list of supported PDF versions.
See :meth:`restrict_to_version`.
:return: A list of :ref:`PDF_VERSION` strings.
*New in cairo 1.10.*
"""
versions = ffi.new('cairo_pdf_version_t const **')
num_versions = ffi.new('int *')
... | Return the list of supported PDF versions.
See :meth:`restrict_to_version`.
:return: A list of :ref:`PDF_VERSION` strings.
*New in cairo 1.10.* |
def start_auth(self, context, internal_req):
"""
See super class method satosa.backends.base.BackendModule#start_auth
:type context: satosa.context.Context
:type internal_req: satosa.internal.InternalData
:rtype: satosa.response.Response
"""
target_entity_id = co... | See super class method satosa.backends.base.BackendModule#start_auth
:type context: satosa.context.Context
:type internal_req: satosa.internal.InternalData
:rtype: satosa.response.Response |
def pop_first_arg(argv):
"""
find first positional arg (does not start with -), take it out of array and return it separately
returns (arg, array)
"""
for arg in argv:
if not arg.startswith('-'):
argv.remove(arg)
return (arg, argv)
return (None, argv) | find first positional arg (does not start with -), take it out of array and return it separately
returns (arg, array) |
def rpush(self, name, *values):
"""
Push the value into the list from the *right* side
:param name: str the name of the redis key
:param values: a list of values or single value to push
:return: Future()
"""
with self.pipe as pipe:
v_encode = self... | Push the value into the list from the *right* side
:param name: str the name of the redis key
:param values: a list of values or single value to push
:return: Future() |
def responses_of(self, request):
"""
Find the responses corresponding to a request.
This function isn't actually used by VCR internally, but is
provided as an external API.
"""
responses = [response for index, response in self._responses(request)]
if responses:
... | Find the responses corresponding to a request.
This function isn't actually used by VCR internally, but is
provided as an external API. |
def mean_field(self):
"""Calculates mean field"""
mean_field = []
for sp_oper in [self.oper['O'], self.oper['O_d']]:
avgO = np.array([self.expected(op) for op in sp_oper])
avgO[abs(avgO) < 1e-10] = 0.
mean_field.append(avgO*self.param['ekin'])
return ... | Calculates mean field |
def raise_on_errors(errors, level=logging.CRITICAL):
"""Raise a CoTError if errors.
Helper function because I had this code block everywhere.
Args:
errors (list): the error errors
level (int, optional): the log level to use. Defaults to logging.CRITICAL
Raises:
CoTError: if e... | Raise a CoTError if errors.
Helper function because I had this code block everywhere.
Args:
errors (list): the error errors
level (int, optional): the log level to use. Defaults to logging.CRITICAL
Raises:
CoTError: if errors is non-empty |
def _get_algorithm_info(self, algorithm_info):
'''Get algorithm info'''
if algorithm_info['algorithm'] not in self.ALGORITHMS:
raise Exception('Algorithm not supported: %s'
% algorithm_info['algorithm'])
algorithm = self.ALGORITHMS[algorithm_info['algori... | Get algorithm info |
def clear_dns_cache(self,
host: Optional[str]=None,
port: Optional[int]=None) -> None:
"""Remove specified host/port or clear all dns local cache."""
if host is not None and port is not None:
self._cached_hosts.remove((host, port))
elif... | Remove specified host/port or clear all dns local cache. |
def update_and_transform(self, y, exogenous, **kwargs):
"""Update the params and return the transformed arrays
Since no parameters really get updated in the Fourier featurizer, all
we do is compose forecasts for ``n_periods=len(y)`` and then update
``n_``.
Parameters
--... | Update the params and return the transformed arrays
Since no parameters really get updated in the Fourier featurizer, all
we do is compose forecasts for ``n_periods=len(y)`` and then update
``n_``.
Parameters
----------
y : array-like or None, shape=(n_samples,)
... |
def as_dict(self):
"""
Returns dict representations of Xmu object
"""
d = MSONable.as_dict(self)
d["data"] = self.data.tolist()
return d | Returns dict representations of Xmu object |
def in_stroke(self, x, y):
"""Tests whether the given point is inside the area
that would be affected by a :meth:`stroke` operation
given the current path and stroking parameters.
Surface dimensions and clipping are not taken into account.
See :meth:`stroke`, :meth:`set_line_wid... | Tests whether the given point is inside the area
that would be affected by a :meth:`stroke` operation
given the current path and stroking parameters.
Surface dimensions and clipping are not taken into account.
See :meth:`stroke`, :meth:`set_line_width`, :meth:`set_line_join`,
:m... |
def add_source(self, evidence_line, source, label=None, src_type=None):
"""
Applies the triples:
<evidence> <dc:source> <source>
<source> <rdf:type> <type>
<source> <rdfs:label> "label"
TODO this should belong in a higher level class
:param evidence_line: str cur... | Applies the triples:
<evidence> <dc:source> <source>
<source> <rdf:type> <type>
<source> <rdfs:label> "label"
TODO this should belong in a higher level class
:param evidence_line: str curie
:param source: str source as curie
:param label: optional, str type as cu... |
def keyword(self, text):
"""Push a keyword onto the token queue."""
cls = self.KEYWORDS[text]
self.push_token(cls(text, self.lineno, self.offset)) | Push a keyword onto the token queue. |
def connect(self):
""" Construct the psycopg2 connection instance
:return: psycopg2.connect instance
"""
if self._conn:
return self._conn
self._conn = psycopg2.connect(
self.config,
cursor_factory=psycopg2.extras.RealDictCursor,
)
... | Construct the psycopg2 connection instance
:return: psycopg2.connect instance |
def read_namespaced_ingress_status(self, name, namespace, **kwargs): # noqa: E501
"""read_namespaced_ingress_status # noqa: E501
read status of the specified Ingress # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pas... | read_namespaced_ingress_status # noqa: E501
read status of the specified Ingress # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_ingress_status(name, namespace, asyn... |
def poisson_ll(data, means):
"""
Calculates the Poisson log-likelihood.
Args:
data (array): 2d numpy array of genes x cells
means (array): 2d numpy array of genes x k
Returns:
cells x k array of log-likelihood for each cell/cluster pair
"""
if sparse.issparse(data):
... | Calculates the Poisson log-likelihood.
Args:
data (array): 2d numpy array of genes x cells
means (array): 2d numpy array of genes x k
Returns:
cells x k array of log-likelihood for each cell/cluster pair |
def is_volatile(self):
"""
True if combination of field access properties result in a field that
should be interpreted as volatile.
(Any hardware-writable field is inherently volatile)
"""
hw = self.get_property('hw')
return (
(hw in (rdltypes.AccessT... | True if combination of field access properties result in a field that
should be interpreted as volatile.
(Any hardware-writable field is inherently volatile) |
def paths_wanted(self):
"""The set of paths where we expect to find missing nodes."""
return set(address.new(b, target='all') for b in self.missing_nodes) | The set of paths where we expect to find missing nodes. |
def extract(self, item, list_article_candidate):
"""Compares how often any language was detected.
:param item: The corresponding NewscrawlerItem
:param list_article_candidate: A list, the list of ArticleCandidate-Objects which have been extracted
:return: A string, the language which wa... | Compares how often any language was detected.
:param item: The corresponding NewscrawlerItem
:param list_article_candidate: A list, the list of ArticleCandidate-Objects which have been extracted
:return: A string, the language which was most frequently detected |
def add_to_class(self, cls, name):
'''
Hook that replaces the `Field` attribute on a class with a named
``FieldDescriptor``. Called by the metaclass during construction of the
``Model``.
'''
self._name = name
self._container_model_class = cls
setattr(cls, ... | Hook that replaces the `Field` attribute on a class with a named
``FieldDescriptor``. Called by the metaclass during construction of the
``Model``. |
def cmd_position(self, args):
'''position x-m y-m z-m'''
if (len(args) != 3):
print("Usage: position x y z (meters)")
return
if (len(args) == 3):
x_m = float(args[0])
y_m = float(args[1])
z_m = float(args[2])
print("x:%f, y... | position x-m y-m z-m |
def render(self, rect, data):
"""
Displays the elements according to the align properties.
"""
# Make sure we're aligned correctly
if self.horizontal_align not in VerticalLM._VALID_ALIGN_HORIZONTAL:
raise ValueError('Horizontal align is not valid.')
if self.ve... | Displays the elements according to the align properties. |
def _check_no_current_table(new_obj, current_table):
""" Raises exception if we try to add a relation or a column
with no current table. """
if current_table is None:
msg = 'Cannot add {} before adding table'
if isinstance(new_obj, Relation):
raise NoCurrentTableException(msg.for... | Raises exception if we try to add a relation or a column
with no current table. |
def set_policy(name, table='filter', family='ipv4', **kwargs):
'''
.. versionadded:: 2014.1.0
Sets the default policy for iptables firewall tables
table
The table that owns the chain that should be modified
family
Networking family, either ipv4 or ipv6
policy
The requ... | .. versionadded:: 2014.1.0
Sets the default policy for iptables firewall tables
table
The table that owns the chain that should be modified
family
Networking family, either ipv4 or ipv6
policy
The requested table policy |
def _run_grid_multithread(self, func, iterables):
        ''' Run cases with multiple threads to support selenium grid-mode (multiple webs) and appium grid-mode (multiple devices).
@param func: function object
@param iterables: iterable objects
'''
f = lambda x: threading.... | running case with mutil process to support selenium grid-mode(multiple web) and appium grid-mode(multiple devices).
@param func: function object
@param iterables: iterable objects |
def create(self,image_path, size=1024, sudo=False):
'''create will create a a new image
Parameters
==========
image_path: full path to image
    size: image size in MiB, default is 1024 MiB
filesystem: supported file systems ext3/ext4 (ext[2/3]: default ext3
'''
f... | create will create a a new image
Parameters
==========
image_path: full path to image
size: image sizein MiB, default is 1024MiB
filesystem: supported file systems ext3/ext4 (ext[2/3]: default ext3 |
def node_inclusion_predicate_builder(nodes: Iterable[BaseEntity]) -> NodePredicate:
"""Build a function that returns true for the given nodes."""
nodes = set(nodes)
@node_predicate
def node_inclusion_predicate(node: BaseEntity) -> bool:
"""Return true if the node is in the given set of nodes.""... | Build a function that returns true for the given nodes. |
def get_buckets(min_length, max_length, bucket_count):
'''
Get bucket by length.
'''
if bucket_count <= 0:
return [max_length]
unit_length = int((max_length - min_length) // (bucket_count))
buckets = [min_length + unit_length *
(i + 1) for i in range(0, bucket_count)]
... | Get bucket by length. |
def process_response(self, request_id=None):
"""
Process the SAML Response sent by the IdP.
:param request_id: Is an optional argument. Is the ID of the AuthNRequest sent by this SP to the IdP.
:type request_id: string
:raises: OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND, when... | Process the SAML Response sent by the IdP.
:param request_id: Is an optional argument. Is the ID of the AuthNRequest sent by this SP to the IdP.
:type request_id: string
:raises: OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND, when a POST with a SAMLResponse is not found |
def add_torques(self, torques):
'''Add torques for each degree of freedom in the skeleton.
Parameters
----------
torques : list of float
A list of the torques to add to each degree of freedom in the
skeleton.
'''
j = 0
for joint in self.jo... | Add torques for each degree of freedom in the skeleton.
Parameters
----------
torques : list of float
A list of the torques to add to each degree of freedom in the
skeleton. |
def _value_with_fmt(self, val):
"""Convert numpy types to Python types for the Excel writers.
Parameters
----------
val : object
Value to be written into cells
Returns
-------
Tuple with the first element being the converted value and the second
... | Convert numpy types to Python types for the Excel writers.
Parameters
----------
val : object
Value to be written into cells
Returns
-------
Tuple with the first element being the converted value and the second
being an optional format |
def delete_component(self, id):
    """Delete component by id.

    :param id: ID of the component to use
    :type id: str
    :rtype: Response
    """
    # Build the component endpoint URL and issue the DELETE in one step.
    return self._session.delete(self._get_url('component/' + str(id)))
:param id: ID of the component to use
:type id: str
:rtype: Response |
def _exclude_pattern(self, pattern, anchor=True, prefix=None,
is_regex=False):
"""Remove strings (presumably filenames) from 'files' that match
'pattern'.
Other parameters are the same as for 'include_pattern()', above.
The list 'self.files' is modified in place... | Remove strings (presumably filenames) from 'files' that match
'pattern'.
Other parameters are the same as for 'include_pattern()', above.
The list 'self.files' is modified in place. Return True if files are
found.
This API is public to allow e.g. exclusion of SCM subdirs, e.g. ... |
def list_current_filter_set(self,raw=False):
"""User to list a currently selected filter set"""
buf = []
self.open_umanager()
self.ser.write(''.join((self.cmd_current_filter_list,self.cr)))
if self.read_loop(lambda x: x.endswith(self.umanager_prompt),self.timeout,lambda... | User to list a currently selected filter set |
def update_launch_metadata(self, scaling_group, metadata):
"""
Adds the given metadata dict to the existing metadata for the scaling
group's launch configuration.
"""
if not isinstance(scaling_group, ScalingGroup):
scaling_group = self.get(scaling_group)
curr_... | Adds the given metadata dict to the existing metadata for the scaling
group's launch configuration. |
def ban_show(self, ban_id, **kwargs):
    """Show a chat ban by id.

    https://developer.zendesk.com/rest_api/docs/chat/bans#get-ban
    """
    # Interpolate the ban id into the endpoint path and delegate to call().
    return self.call("/api/v2/bans/{}".format(ban_id), **kwargs)
def _compute_f1(self, C, mag, rrup):
"""
Compute f1 term (eq.4, page 105)
"""
r = np.sqrt(rrup ** 2 + C['c4'] ** 2)
f1 = (
C['a1'] +
C['a12'] * (8.5 - mag) ** C['n'] +
(C['a3'] + C['a13'] * (mag - C['c1'])) * np.log(r)
)
if ma... | Compute f1 term (eq.4, page 105) |
def is_dict_equal(d1, d2, keys=None, ignore_none_values=True):
"""
Compares two dictionaries to see if they are equal
:param d1: the first dictionary
:param d2: the second dictionary
:param keys: the keys to limit the comparison to (optional)
:param ignore_none_values: whether to ignore none val... | Compares two dictionaries to see if they are equal
:param d1: the first dictionary
:param d2: the second dictionary
:param keys: the keys to limit the comparison to (optional)
:param ignore_none_values: whether to ignore none values
:return: true if the dictionaries are equal, else false |
def plot_and_save(self, **kwargs):
"""Used when the plot method defined does not create a figure nor calls save_plot
Then the plot method has to use self.fig"""
self.fig = pyplot.figure()
self.plot()
self.axes = pyplot.gca()
self.save_plot(self.fig, self.axes, **kwargs)
... | Used when the plot method defined does not create a figure nor calls save_plot
Then the plot method has to use self.fig |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.