code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _link_or_update_vars(self):
"""
Creates or updates the symlink to group_vars and returns None.
:returns: None
"""
for d, source in self.links.items():
target = os.path.join(self.inventory_directory, d)
source = os.path.join(self._config.scenario.direc... | Creates or updates the symlink to group_vars and returns None.
:returns: None |
def align(*objects, **kwargs):
"""align(*objects, join='inner', copy=True, indexes=None,
exclude=frozenset())
Given any number of Dataset and/or DataArray objects, returns new
objects with aligned indexes and dimension sizes.
Array from the aligned objects are suitable as input to mathema... | align(*objects, join='inner', copy=True, indexes=None,
exclude=frozenset())
Given any number of Dataset and/or DataArray objects, returns new
objects with aligned indexes and dimension sizes.
Array from the aligned objects are suitable as input to mathematical
operators, because along eac... |
def bundles(ctx):
    """
    List discovered bundles.
    """
    found = _get_bundles(ctx.obj.data['env'])
    rows = [(b.name, f'{b.__module__}.{b.__class__.__name__}') for b in found]
    print_table(('Name', 'Location'), rows)
def parse_array(raw_array):
    """Parse a WMIC brace-delimited array string into a list of values."""
    # Strip braces, quotes and spaces in a single C-level pass, then split
    # the remaining comma-separated values.
    cleaned = raw_array.translate(str.maketrans('', '', '{}" '))
    return cleaned.split(',')
def createModel(modelName, **kwargs):
"""
Return a classification model of the appropriate type. The model could be any
    supported subclass of ClassificationModel based on modelName.
@param modelName (str) A supported temporal memory type
@param kwargs (dict) Constructor argument for the class that will b... | Return a classification model of the appropriate type. The model could be any
supported subclass of ClassificationModel based on modelName.
@param modelName (str) A supported temporal memory type
@param kwargs (dict) Constructor argument for the class that will be
instantiated. Keyw... |
def run_next(self):
"""
Run the next item in the queue (a job waiting to run).
"""
while 1:
(op, obj) = self.work_queue.get()
if op is STOP_SIGNAL:
return
try:
(job_id, command_line) = obj
try:
... | Run the next item in the queue (a job waiting to run). |
def list_sources(embedding_name=None):
"""Get valid token embedding names and their pre-trained file names.
To load token embedding vectors from an externally hosted pre-trained token embedding file,
such as those of GloVe and FastText, one should use
`gluonnlp.embedding.create(embedding_name, source)... | Get valid token embedding names and their pre-trained file names.
To load token embedding vectors from an externally hosted pre-trained token embedding file,
such as those of GloVe and FastText, one should use
`gluonnlp.embedding.create(embedding_name, source)`. This method returns all the
valid names... |
def cumulative_sum(self):
"""
Return the cumulative sum of the elements in the SArray.
Returns an SArray where each element in the output corresponds to the
sum of all the elements preceding and including it. The SArray is
expected to be of numeric type (int, float), or a numeri... | Return the cumulative sum of the elements in the SArray.
Returns an SArray where each element in the output corresponds to the
sum of all the elements preceding and including it. The SArray is
expected to be of numeric type (int, float), or a numeric vector type.
Returns
------... |
def _get_all_children(self,):
"""
return the list of children of a node
"""
res = ''
if self.child_nodes:
for c in self.child_nodes:
res += ' child = ' + str(c) + '\n'
if c.child_nodes:
for grandchild in c.child_nod... | return the list of children of a node |
def get_context_file_name(pid_file):
    """Return the path of the daemon's ``context.json``, next to *pid_file*.

    When the daemon starts it writes out which port it is using to this
    file, in the same directory as its pid file.
    """
    return os.path.join(os.path.dirname(pid_file), "context.json")
def do_info(self, arg, arguments):
"""
::
Usage:
info [--all]
Options:
--all -a more extensive information
Prints some internal information about the shell
"""
if arguments["--all"]:
Console.ok(... | ::
Usage:
info [--all]
Options:
--all -a more extensive information
Prints some internal information about the shell |
def go_to_step(self, step):
"""Set the stacked widget to the given step, set up the buttons,
and run all operations that should start immediately after
entering the new step.
:param step: The step widget to be moved to.
:type step: WizardStep
"""
self.stack... | Set the stacked widget to the given step, set up the buttons,
and run all operations that should start immediately after
entering the new step.
:param step: The step widget to be moved to.
:type step: WizardStep |
def get_full_current_object(arn, current_model):
"""
Utility method to fetch items from the Current table if they are too big for SNS/SQS.
:param record:
:param current_model:
:return:
"""
LOG.debug(f'[-->] Item with ARN: {arn} was too big for SNS -- fetching it from the Current table...')
... | Utility method to fetch items from the Current table if they are too big for SNS/SQS.
:param record:
:param current_model:
:return: |
def connect(self):
"""
Connects to the Deluge instance
"""
self._connect()
logger.debug('Connected to Deluge, detecting daemon version')
self._detect_deluge_version()
logger.debug('Daemon version {} detected, logging in'.format(self.deluge_version))
if sel... | Connects to the Deluge instance |
def job_terminate(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /job-xxxx/terminate API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2Fterminate
"""
return DXHTTPRequest('/%s/terminate' ... | Invokes the /job-xxxx/terminate API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2Fterminate |
def remove(self, path, recursive=True):
    """
    Remove the file or directory at remote location `path`.

    :param path: remote path to delete.
    :param recursive: when True (the default), pass ``-r`` so that
        directories are removed as well.
    """
    cmd = ["rm", "-r", path] if recursive else ["rm", path]
    self.remote_context.check_output(cmd)
def make_command(tasks, *args, **kwargs):
"""
Create a TaskCommand with defined tasks.
    This is a helper function to avoid boilerplate when dealing with simple
cases (e.g., all cli arguments can be handled by TaskCommand), with no
special processing. In general, this means a command only needs to r... | Create a TaskCommand with defined tasks.
This is a helper function to avoid boilerplate when dealing with simple
cases (e.g., all cli arguments can be handled by TaskCommand), with no
special processing. In general, this means a command only needs to run
established tasks.
Arguments:
tasks - ... |
def serve_forever(self, poll_interval=0.5):
"""Handle one request at a time until shutdown.
Polls for shutdown every poll_interval seconds. Ignores
self.timeout. If you need to do periodic tasks, do them in
another thread.
"""
self._serving_event.set()
self._shut... | Handle one request at a time until shutdown.
Polls for shutdown every poll_interval seconds. Ignores
self.timeout. If you need to do periodic tasks, do them in
another thread. |
def _get_nets_other(self, *args, **kwargs):
"""
Deprecated. This will be removed in a future release.
"""
from warnings import warn
warn('Whois._get_nets_other() has been deprecated and will be '
'removed. You should now use Whois.get_nets_other().')
return ... | Deprecated. This will be removed in a future release. |
def query_mongo_sort_decend(
database_name,
collection_name,
query={},
skip=0,
limit=getattr(
settings,
'MONGO_LIMIT',
200),
return_keys=(),
sortkey=None):
    """return a response_dict with a list of search results in descending
    ... | return a response_dict with a list of search results in descending
order based on a sort key |
def setbit(self, key, offset, value):
"""Sets or clears the bit at offset in the string value stored at key.
:raises TypeError: if offset is not int
:raises ValueError: if offset is less than 0 or value is not 0 or 1
"""
if not isinstance(offset, int):
raise TypeErro... | Sets or clears the bit at offset in the string value stored at key.
:raises TypeError: if offset is not int
:raises ValueError: if offset is less than 0 or value is not 0 or 1 |
def getChildren(self, forgetter, field=None, where=None, orderBy=None):
"""Return the children that links to me.
That means that I have to be listed in their _userClasses
somehow. If field is specified, that field in my children is
used as the pointer to me. Use this if you have multipl... | Return the children that links to me.
That means that I have to be listed in their _userClasses
somehow. If field is specified, that field in my children is
used as the pointer to me. Use this if you have multiple fields
referring to my class. |
async def call_command(bot: NoneBot, ctx: Context_T,
name: Union[str, CommandName_T], *,
current_arg: str = '',
args: Optional[CommandArgs_T] = None,
check_perm: bool = True,
disable_interaction: bool = Fa... | Call a command internally.
This function is typically called by some other commands
or "handle_natural_language" when handling NLPResult object.
Note: If disable_interaction is not True, after calling this function,
any previous command session will be overridden, even if the command
being called ... |
def parse_extension_arg(arg, arg_dict):
"""
Converts argument strings in key=value or key.namespace=value form
to dictionary entries
Parameters
----------
arg : str
The argument string to parse, which must be in key=value or
key.namespace=value form.
arg_dict : dict
... | Converts argument strings in key=value or key.namespace=value form
to dictionary entries
Parameters
----------
arg : str
The argument string to parse, which must be in key=value or
key.namespace=value form.
arg_dict : dict
The dictionary into which the key/value pair will be... |
def _parse_abbreviation(self, abbr):
"""
Parse a team's abbreviation.
Given the team's HTML name tag, parse their abbreviation.
Parameters
----------
abbr : string
A string of a team's HTML name tag.
Returns
-------
string
... | Parse a team's abbreviation.
Given the team's HTML name tag, parse their abbreviation.
Parameters
----------
abbr : string
A string of a team's HTML name tag.
Returns
-------
string
Returns a ``string`` of the team's abbreviation. |
def delete(self):
    """Delete this file and return the new, deleted JFSFile."""
    # Deletion is requested by POSTing to the file's own path with dl=true.
    return self.jfs.post(url=self.path, params={'dl':'true'})
def inputAnalyzeCallback(self, *args, **kwargs):
"""
Test method for inputAnalzeCallback
This method loops over the passed number of files,
and optionally "delays" in each loop to simulate
some analysis. The delay length is specified by
the '--test <delay>' flag.
... | Test method for inputAnalzeCallback
This method loops over the passed number of files,
and optionally "delays" in each loop to simulate
some analysis. The delay length is specified by
the '--test <delay>' flag. |
def _reproduce_stages(
G,
stages,
node,
force,
dry,
interactive,
ignore_build_cache,
no_commit,
downstream,
):
r"""Derive the evaluation of the given node for the given graph.
When you _reproduce a stage_, you want to _evaluate the descendants_
to know if it make sense t... | r"""Derive the evaluation of the given node for the given graph.
When you _reproduce a stage_, you want to _evaluate the descendants_
to know if it make sense to _recompute_ it. A post-ordered search
will give us an order list of the nodes we want.
For example, let's say that we have the following pip... |
def _get_data(self, url, config, send_sc=True):
"""
Hit a given URL and return the parsed json
"""
# Load basic authentication configuration, if available.
if config.username and config.password:
auth = (config.username, config.password)
else:
auth... | Hit a given URL and return the parsed json |
async def restart(request):
"""
Returns OK, then waits approximately 1 second and restarts container
"""
def wait_and_restart():
log.info('Restarting server')
sleep(1)
os.system('kill 1')
Thread(target=wait_and_restart).start()
return web.json_response({"message": "restar... | Returns OK, then waits approximately 1 second and restarts container |
def _build_kreemer_cell(data, loc):
'''
Constructs the "Kreemer Cell" from the input file. The Kreemer cell is
simply a set of five lines describing the four nodes of the square (closed)
:param list data:
Strain data as list of text lines (input from linecache.getlines)
:param int loc:
... | Constructs the "Kreemer Cell" from the input file. The Kreemer cell is
simply a set of five lines describing the four nodes of the square (closed)
:param list data:
Strain data as list of text lines (input from linecache.getlines)
:param int loc:
Pointer to location in data
:returns:
... |
def simulation_manager(self, thing=None, **kwargs):
"""
Constructs a new simulation manager.
:param thing: Optional - What to put in the new SimulationManager's active stash (either a SimState or a list of SimStates).
:param kwargs: Any additional keyword arguments wi... | Constructs a new simulation manager.
:param thing: Optional - What to put in the new SimulationManager's active stash (either a SimState or a list of SimStates).
:param kwargs: Any additional keyword arguments will be passed to the SimulationManager constructor
:returns: ... |
def _variant_levels(level, variant):
"""
Gets the level for the variant.
:param int level: the current variant level
:param int variant: the value for this level if variant
:returns: a level for the object and one for the function
:rtype: int * int
"""
r... | Gets the level for the variant.
:param int level: the current variant level
:param int variant: the value for this level if variant
:returns: a level for the object and one for the function
:rtype: int * int |
def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile):
"""
Private file object read method. Classes that inherit from this base class must implement this method.
The ``read()`` method that each file object inherits from this base cla... | Private file object read method. Classes that inherit from this base class must implement this method.
The ``read()`` method that each file object inherits from this base class performs the processes common to all
file read methods, after which it calls the file object's ``_read()`` (the preceding unde... |
def escape(u):
    """Percent-encode a text string in an OAuth-compatible fashion.

    TODO: verify whether this can in fact be used for OAuth 2

    :raises ValueError: if `u` is not a text (unicode) string.
    """
    if isinstance(u, unicode_type):
        # RFC 3986 percent-encoding; only '~' is left unescaped.
        return quote(u.encode('utf-8'), safe=b'~')
    raise ValueError('Only unicode objects are escapable.')
def _constexpr_transform(fn):
"""
>>> from Redy.Opt.ConstExpr import constexpr, const, optimize, macro
>>> import dis
>>> a = 1; b = ""; c = object()
>>> x = 1
>>> @optimize
>>> def f(y):
>>> val1: const[int] = a
>>> val2: const = b
>>> if constexpr[x is c]:
>>> ... | >>> from Redy.Opt.ConstExpr import constexpr, const, optimize, macro
>>> import dis
>>> a = 1; b = ""; c = object()
>>> x = 1
>>> @optimize
>>> def f(y):
>>> val1: const[int] = a
>>> val2: const = b
>>> if constexpr[x is c]:
>>> return val1, y
>>> elif con... |
def CreateBiddingStrategy(client):
"""Creates a bidding strategy object.
Args:
client: AdWordsClient the client to run the example with.
Returns:
dict An object representing a bidding strategy.
"""
# Initialize appropriate service.
bidding_strategy_service = client.GetService(
'BiddingStrate... | Creates a bidding strategy object.
Args:
client: AdWordsClient the client to run the example with.
Returns:
dict An object representing a bidding strategy. |
def load(filename: str, format: str = None):
"""Load a task file and get a ``Project`` back."""
path = Path(filename).resolve()
with path.open() as file:
data = file.read()
if format is None:
loader, error_class = _load_autodetect, InvalidMofileFormat
else:
try:
... | Load a task file and get a ``Project`` back. |
def dump(self, output, close_after_write=True):
"""Write data to the output with tabular format.
Args:
output (file descriptor or str):
file descriptor or path to the output file.
close_after_write (bool, optional):
Close the output after write.
... | Write data to the output with tabular format.
Args:
output (file descriptor or str):
file descriptor or path to the output file.
close_after_write (bool, optional):
Close the output after write.
Defaults to |True|. |
def divide(self, other, out=None):
    """Return ``out = self / other``.

    If ``out`` is provided, the result is written to it.

    See Also
    --------
    LinearSpace.divide
    """
    # Delegate the actual computation to this element's containing space.
    quotient = self.space.divide(self, other, out=out)
    return quotient
def flat_images(images, grid=None, bfill=1.0, bsz=(1, 1)):
"""
convert batch image to flat image with margin inserted
[B,h,w,c] => [H,W,c]
:param images:
:param grid: patch grid cell size of (Row, Col)
:param bfill: board filling value
:param bsz: int or (int, int) board size
:return: fl... | convert batch image to flat image with margin inserted
[B,h,w,c] => [H,W,c]
:param images:
:param grid: patch grid cell size of (Row, Col)
:param bfill: board filling value
:param bsz: int or (int, int) board size
:return: flatted image |
def _get_exchange_key_ntlm_v1(negotiate_flags, session_base_key, server_challenge, lm_challenge_response, lm_hash):
"""
[MS-NLMP] v28.0 2016-07-14
4.3.5.1 KXKEY
Calculates the Key Exchange Key for NTLMv1 authentication. Used for signing and sealing messages
@param negotiate_flags:
@param sessi... | [MS-NLMP] v28.0 2016-07-14
4.3.5.1 KXKEY
Calculates the Key Exchange Key for NTLMv1 authentication. Used for signing and sealing messages
@param negotiate_flags:
@param session_base_key: A session key calculated from the user password challenge
@param server_challenge: A random 8-byte response gen... |
def estimate_tau_exp(chains, **kwargs):
"""
Estimate the exponential auto-correlation time for all parameters in a chain.
"""
# Calculate the normalised autocorrelation function in each parameter.
rho = np.nan * np.ones(chains.shape[1:])
for i in range(chains.shape[2]):
try:
... | Estimate the exponential auto-correlation time for all parameters in a chain. |
async def setup_watchdog(self, cb, timeout):
    """Schedule a reconnect callback after @timeout seconds of inactivity.

    Stores `cb` and `timeout` on the instance and starts the watchdog
    coroutine as a task on this connection's event loop.
    """
    self._watchdog_cb = cb
    self._watchdog_timeout = timeout
    self._watchdog_task = self.loop.create_task(self._watchdog(timeout))
def unzoom_all(self, event=None, panel=None):
    """Zoom the selected panel (default: the current one) out to the
    full data range."""
    target = self.current_panel if panel is None else panel
    self.panels[target].unzoom_all(event=event)
def RandomShuffle(a, seed):
    """
    Return a uniformly shuffled copy of `a` as a 1-tuple; `a` itself
    is left untouched.
    """
    # NOTE(review): a falsy seed (0 or None) leaves the global RNG unseeded,
    # matching the original behavior.
    if seed:
        np.random.seed(seed)
    shuffled = a.copy()
    np.random.shuffle(shuffled)
    return (shuffled,)
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the DeriveKey request payload to a stream.
Args:
output_buffer (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayS... | Write the data encoding the DeriveKey request payload to a stream.
Args:
output_buffer (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining... |
def _report_self(self):
'''
Reports the kafka monitor uuid to redis
'''
key = "stats:kafka-monitor:self:{m}:{u}".format(
m=socket.gethostname(),
u=self.my_uuid)
self.redis_conn.set(key, time.time())
self.redis_conn.expire(key, self.settings['HEARTB... | Reports the kafka monitor uuid to redis |
def _filter_pb(field_or_unary):
"""Convert a specific protobuf filter to the generic filter type.
Args:
field_or_unary (Union[google.cloud.proto.firestore.v1beta1.\
query_pb2.StructuredQuery.FieldFilter, google.cloud.proto.\
firestore.v1beta1.query_pb2.StructuredQuery.FieldFilte... | Convert a specific protobuf filter to the generic filter type.
Args:
field_or_unary (Union[google.cloud.proto.firestore.v1beta1.\
query_pb2.StructuredQuery.FieldFilter, google.cloud.proto.\
firestore.v1beta1.query_pb2.StructuredQuery.FieldFilter]): A
field or unary filte... |
def charge_balance(self):
r'''Charge imbalance of the mixture, in units of [faraday].
Mixtures meeting the electroneutrality condition will have an imbalance
of 0.
Examples
--------
>>> Mixture(['Na+', 'Cl-', 'water'], zs=[.01, .01, .98]).charge_balance
0.0
... | r'''Charge imbalance of the mixture, in units of [faraday].
Mixtures meeting the electroneutrality condition will have an imbalance
of 0.
Examples
--------
>>> Mixture(['Na+', 'Cl-', 'water'], zs=[.01, .01, .98]).charge_balance
0.0 |
def read_message_from_pipe(pipe_handle):
    """
    (coroutine)
    Read one message from this pipe and return it as text.

    :param pipe_handle: handle of the pipe to read from.
    :returns: the message payload decoded as UTF-8 (undecodable bytes are
        dropped via the 'ignore' error handler).
    """
    # Old-style (trollius / Python 2) coroutine: `yield From(...)` awaits a
    # sub-coroutine and `raise Return(...)` acts as the return statement.
    data = yield From(read_message_bytes_from_pipe(pipe_handle))
    assert isinstance(data, bytes)
    raise Return(data.decode('utf-8', 'ignore'))
def set_content_type(self):
"""
Set the content type based on the file extension used in the object
name.
"""
if self.object_name and not self.content_type:
# XXX nothing is currently done with the encoding... we may
# need to in the future
sel... | Set the content type based on the file extension used in the object
name. |
def hide_busy(self):
"""Unlock buttons A helper function to indicate processing is done."""
self.progress_bar.hide()
self.parent.pbnNext.setEnabled(True)
self.parent.pbnBack.setEnabled(True)
self.parent.pbnCancel.setEnabled(True)
self.parent.repaint()
disable_busy... | Unlock buttons A helper function to indicate processing is done. |
def execute_on_key_owner(self, key, task):
"""
Executes a task on the owner of the specified key.
:param key: (object), the specified key.
:param task: (Task), a task executed on the owner of the specified key.
:return: (:class:`~hazelcast.future.Future`), future representing pe... | Executes a task on the owner of the specified key.
:param key: (object), the specified key.
:param task: (Task), a task executed on the owner of the specified key.
:return: (:class:`~hazelcast.future.Future`), future representing pending completion of the task. |
def truncate(sequence):
"""
Create a potentially shortened text display of a list.
Parameters
----------
sequence : list
An indexable sequence of elements.
Returns
-------
str
The list as a formatted string.
"""
if len(sequence) > LIST_SLICE:
return ", ... | Create a potentially shortened text display of a list.
Parameters
----------
sequence : list
An indexable sequence of elements.
Returns
-------
str
The list as a formatted string. |
def run_server(self, port):
"""run a server binding to port"""
try:
self.server = MultiThreadedHTTPServer(('0.0.0.0', port), Handler)
except socket.error, e: # failed to bind port
logger.error(str(e))
sys.exit(1)
logger.info("HTTP serve at http://0.... | run a server binding to port |
def filter_taxa(fasta_path: 'path to fasta input',
taxids: 'comma delimited list of taxon IDs',
unclassified: 'pass sequences unclassified at superkingdom level >(0)' = False,
discard: 'discard specified taxa' = False,
warnings: 'show warnings' = False):
... | Customisable filtering of tictax flavoured fasta files |
def register_id(self, id, module):
    """Associate the given id with the given project module."""
    # Both values must be strings (Python 2 `basestring` check, as in the
    # surrounding codebase).
    assert isinstance(id, basestring) and isinstance(module, basestring)
    self.id2module[id] = module
def delete(self, key):
    '''Removes the object named by `key`.

    Deleting a key that has no stored object is a no-op.

    Args:
        key: Key naming the object to remove.
    '''
    path = self.object_path(key)
    try:
        os.remove(path)
    except FileNotFoundError:
        # EAFP: avoids the exists()-then-remove() TOCTOU race of the old
        # implementation, and also cleans up dangling symlinks, which
        # os.path.exists() reports as missing.
        pass
def clear(self, *args):
    """
    Reset every field named in **self.fields_to_clear**, plus any extra
    field names passed in, back to ``None``.

    :param args: extra fields to clear.
    """
    for name in (*self.fields_to_clear, *args):
        setattr(self, name, None)
def exists(self, symbol):
"""Checks to if a symbol exists, by name.
Parameters
----------
symbol : str or Symbol
Returns
-------
bool
"""
if isinstance(symbol, str):
sym = symbol
elif isinstance(symbol, ... | Checks to if a symbol exists, by name.
Parameters
----------
symbol : str or Symbol
Returns
-------
bool |
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a bencoded file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
... | Parses a bencoded file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
UnableToParseFile: when the file cannot be parsed. |
def LDREX(cpu, dest, src, offset=None):
"""
LDREX loads data from memory.
* If the physical address has the shared TLB attribute, LDREX
tags the physical address as exclusive access for the current
processor, and clears any exclusive access tag for this
processor fo... | LDREX loads data from memory.
* If the physical address has the shared TLB attribute, LDREX
tags the physical address as exclusive access for the current
processor, and clears any exclusive access tag for this
processor for any other physical address.
* Otherwise, it tags t... |
def _create_field_vectors(self):
"""Builds a vector space model of every document using lunr.Vector."""
field_vectors = {}
term_idf_cache = {}
for field_ref, term_frequencies in self.field_term_frequencies.items():
_field_ref = FieldRef.from_string(field_ref)
fie... | Builds a vector space model of every document using lunr.Vector. |
def p_systemcall_signed(self, p): # for $signed system task
    # NOTE: the string literal below is a PLY (yacc) grammar rule consumed
    # via __doc__ -- it is the parser specification, not documentation.
    # Do not edit its text.
    'systemcall : DOLLER SIGNED LPAREN sysargs RPAREN'
    # p[2] is the task-name token (SIGNED), p[4] the parsed argument list.
    p[0] = SystemCall(p[2], p[4], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
def setup_conf(conf_globals):
"""
Setup function that is called from within the project's
docs/conf.py module that takes the conf module's globals() and
assigns the values that can be automatically determined from the
current project, such as project name, package name, version and
author.
"... | Setup function that is called from within the project's
docs/conf.py module that takes the conf module's globals() and
assigns the values that can be automatically determined from the
current project, such as project name, package name, version and
author. |
def lookup_rdap(self, hr=True, show_name=False, colorize=True, **kwargs):
"""
The function for wrapping IPWhois.lookup_rdap() and generating
formatted CLI output.
Args:
hr (:obj:`bool`): Enable human readable key translations. Defaults
to True.
sh... | The function for wrapping IPWhois.lookup_rdap() and generating
formatted CLI output.
Args:
hr (:obj:`bool`): Enable human readable key translations. Defaults
to True.
show_name (:obj:`bool`): Show human readable name (default is to
only show short... |
def update(self):
"""Update the host/system info using the input method.
Return the stats (dict)
"""
# Init new stats
stats = self.get_init_value()
if self.input_method == 'local':
# Update stats using the standard system lib
stats['os_name'] = p... | Update the host/system info using the input method.
Return the stats (dict) |
def to_bytesize(value, default_unit=None, base=DEFAULT_BASE):
"""Convert `value` to bytes, accepts notations such as "4k" to mean 4096 bytes
Args:
value (str | unicode | int | None): Number of bytes optionally suffixed by a char from UNITS
default_unit (str | unicode | None): Default unit to us... | Convert `value` to bytes, accepts notations such as "4k" to mean 4096 bytes
Args:
value (str | unicode | int | None): Number of bytes optionally suffixed by a char from UNITS
default_unit (str | unicode | None): Default unit to use for unqualified values
base (int): Base to use (usually 102... |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'available') and self.available is not None:
_dict['available'] = self.available
if hasattr(self, 'processing') and self.processing is not None:
_dict['processi... | Return a json dictionary representing this model. |
def dist_eudex(src, tar, weights='exponential', max_length=8):
"""Return normalized Hamming distance between Eudex hashes of two terms.
This is a wrapper for :py:meth:`Eudex.dist`.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparis... | Return normalized Hamming distance between Eudex hashes of two terms.
This is a wrapper for :py:meth:`Eudex.dist`.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
weights : str, iterable, or generator function
The weig... |
def loadfile(args):
'''load a log file (path given by arg)'''
mestate.console.write("Loading %s...\n" % args)
t0 = time.time()
mlog = mavutil.mavlink_connection(args, notimestamps=False,
zero_time_base=False,
progress_callback=p... | load a log file (path given by arg) |
def fastcc_consistent_subset(model, epsilon, solver):
"""Return consistent subset of model.
The largest consistent subset is returned as
a set of reaction names.
Args:
model: :class:`MetabolicModel` to solve.
epsilon: Flux threshold value.
solver: LP solver instance to use.
... | Return consistent subset of model.
The largest consistent subset is returned as
a set of reaction names.
Args:
model: :class:`MetabolicModel` to solve.
epsilon: Flux threshold value.
solver: LP solver instance to use.
Returns:
Set of reaction IDs in the consistent reac... |
def update(self):
"""Update |TTM| based on :math:`TTM = TT+DTTM`.
>>> from hydpy.models.hland import *
>>> parameterstep('1d')
>>> nmbzones(1)
>>> zonetype(FIELD)
>>> tt(1.0)
>>> dttm(-2.0)
>>> derived.ttm.update()
>>> derived.ttm
ttm(-1.0... | Update |TTM| based on :math:`TTM = TT+DTTM`.
>>> from hydpy.models.hland import *
>>> parameterstep('1d')
>>> nmbzones(1)
>>> zonetype(FIELD)
>>> tt(1.0)
>>> dttm(-2.0)
>>> derived.ttm.update()
>>> derived.ttm
ttm(-1.0) |
def sync(self):
"""
Fetch the list of apps from Marathon, find the domains that require
certificates, and issue certificates for any domains that don't already
have a certificate.
"""
self.log.info('Starting a sync...')
def log_success(result):
self.l... | Fetch the list of apps from Marathon, find the domains that require
certificates, and issue certificates for any domains that don't already
have a certificate. |
def _from_any_pb(pb_type, any_pb):
"""Converts an Any protobuf to the specified message type
Args:
pb_type (type): the type of the message that any_pb stores an instance
of.
any_pb (google.protobuf.any_pb2.Any): the object to be converted.
Returns:
pb_type: An instance ... | Converts an Any protobuf to the specified message type
Args:
pb_type (type): the type of the message that any_pb stores an instance
of.
any_pb (google.protobuf.any_pb2.Any): the object to be converted.
Returns:
pb_type: An instance of the pb_type message.
Raises:
... |
def replace_all(expression: Expression, rules: Iterable[ReplacementRule], max_count: int=math.inf) \
-> Union[Expression, Sequence[Expression]]:
"""Replace all occurrences of the patterns according to the replacement rules.
A replacement rule consists of a *pattern*, that is matched against any subexpr... | Replace all occurrences of the patterns according to the replacement rules.
A replacement rule consists of a *pattern*, that is matched against any subexpression
of the expression. If a match is found, the *replacement* callback of the rule is called with
the variables from the match substitution. Whatever... |
def _get_py_dictionary(self, var, names=None, used___dict__=False):
'''
:return tuple(names, used___dict__), where used___dict__ means we have to access
using obj.__dict__[name] instead of getattr(obj, name)
'''
# TODO: Those should be options (would fix https://github.com/Micro... | :return tuple(names, used___dict__), where used___dict__ means we have to access
using obj.__dict__[name] instead of getattr(obj, name) |
def batch_to_ids(batch: List[List[str]]) -> torch.Tensor:
"""
Converts a batch of tokenized sentences to a tensor representing the sentences with encoded characters
(len(batch), max sentence length, max word length).
Parameters
----------
batch : ``List[List[str]]``, required
A list of ... | Converts a batch of tokenized sentences to a tensor representing the sentences with encoded characters
(len(batch), max sentence length, max word length).
Parameters
----------
batch : ``List[List[str]]``, required
A list of tokenized sentences.
Returns
-------
A tensor of padd... |
def show_tree(model=None):
"""Display the model tree window.
Args:
model: :class:`Model <modelx.core.model.Model>` object.
Defaults to the current model.
Warnings:
For this function to work with Spyder, *Graphics backend* option
of Spyder must be set to *inline*.
""... | Display the model tree window.
Args:
model: :class:`Model <modelx.core.model.Model>` object.
Defaults to the current model.
Warnings:
For this function to work with Spyder, *Graphics backend* option
of Spyder must be set to *inline*. |
def parse_phones(self):
"""Parse TextGrid phone intervals.
This method parses the phone intervals in a TextGrid to extract each
phone and each phone's start and end times in the audio recording. For
each phone, it instantiates the class Phone(), with the phone and its
st... | Parse TextGrid phone intervals.
This method parses the phone intervals in a TextGrid to extract each
phone and each phone's start and end times in the audio recording. For
each phone, it instantiates the class Phone(), with the phone and its
start and end times as attributes of ... |
def resize(self, width: int, height: int):
"""
Replacement for Qt's resizeGL method.
"""
self.width = width // self.widget.devicePixelRatio()
self.height = height // self.widget.devicePixelRatio()
self.buffer_width = width
self.buffer_height = height
... | Replacement for Qt's resizeGL method. |
def get_url(access_token, endpoint=ams_rest_endpoint, flag=True):
'''Get Media Services Final Endpoint URL.
Args:
access_token (str): A valid Azure authentication token.
endpoint (str): Azure Media Services Initial Endpoint.
flag (bol): flag.
Returns:
HTTP response. JSON bod... | Get Media Services Final Endpoint URL.
Args:
access_token (str): A valid Azure authentication token.
endpoint (str): Azure Media Services Initial Endpoint.
flag (bol): flag.
Returns:
HTTP response. JSON body. |
def CA_code_header(fname_out, Nca):
"""
Write 1023 bit CA (Gold) Code Header Files
Mark Wickert February 2015
"""
dir_path = os.path.dirname(os.path.realpath(__file__))
ca = loadtxt(dir_path + '/ca1thru37.txt', dtype=int16, usecols=(Nca - 1,), unpack=True)
M = 1023 # code period
... | Write 1023 bit CA (Gold) Code Header Files
Mark Wickert February 2015 |
def environment_as(**kwargs):
"""Update the environment to the supplied values, for example:
with environment_as(PYTHONPATH='foo:bar:baz',
PYTHON='/usr/bin/python2.7'):
subprocess.Popen(foo).wait()
"""
new_environment = kwargs
old_environment = {}
def setenv(key, val):
if val... | Update the environment to the supplied values, for example:
with environment_as(PYTHONPATH='foo:bar:baz',
PYTHON='/usr/bin/python2.7'):
subprocess.Popen(foo).wait() |
def shout(self, group, msg_p):
    """Send a message to a named group.

    The message is consumed (destroyed) by the underlying call after
    sending, so *msg_p* must not be reused by the caller.

    :param group: name of the group to address
    :param msg_p: zmsg message handle to send (ownership transferred)
    :return: result code of the underlying ``zyre_shout`` C call
    """
    wrapped = czmq.zmsg_p.from_param(msg_p)
    return lib.zyre_shout(self._as_parameter_, group, byref(wrapped))
def action(callback=None, name=None, path=None, methods=Method.GET, resource=None, tags=None,
summary=None, middleware=None):
# type: (Callable, Path, Path, Methods, Type[Resource], Tags, str, List[Any]) -> Operation
"""
Decorator to apply an action to a resource. An action is applied to a `detai... | Decorator to apply an action to a resource. An action is applied to a `detail` operation. |
def fully_correlated_conditional(Kmn, Kmm, Knn, f, *, full_cov=False, full_output_cov=False, q_sqrt=None, white=False):
"""
This function handles conditioning of multi-output GPs in the case where the conditioning
points are all fully correlated, in both the prior and posterior.
:param Kmn: LM x N x P
... | This function handles conditioning of multi-output GPs in the case where the conditioning
points are all fully correlated, in both the prior and posterior.
:param Kmn: LM x N x P
:param Kmm: LM x LM
:param Knn: N x P or N x P x N x P
:param f: data matrix, LM x 1
:param q_sqrt: 1 x LM x LM or 1... |
def encode_basestring(s):
    """Return a JSON representation of a Python string."""
    # Python 2 semantics: a byte string (`str`) containing non-ASCII
    # UTF-8 is decoded to unicode before escaping.
    if isinstance(s, str):
        if HAS_UTF8.search(s) is not None:
            s = s.decode('utf-8')

    def _sub(match):
        return ESCAPE_DCT[match.group(0)]

    escaped = ESCAPE.sub(_sub, s)
    return u'"' + escaped + u'"'
def _construct_axes_dict(self, axes=None, **kwargs):
"""Return an axes dictionary for myself."""
d = {a: self._get_axis(a) for a in (axes or self._AXIS_ORDERS)}
d.update(kwargs)
return d | Return an axes dictionary for myself. |
def recipe_weinreb17(adata, log=True, mean_threshold=0.01, cv_threshold=2,
n_pcs=50, svd_solver='randomized', random_state=0,
copy=False):
"""Normalization and filtering as of [Weinreb17]_.
Expects non-logarithmized data. If using logarithmized data, pass `log=False`.
... | Normalization and filtering as of [Weinreb17]_.
Expects non-logarithmized data. If using logarithmized data, pass `log=False`.
Parameters
----------
adata : :class:`~anndata.AnnData`
Annotated data matrix.
copy : bool (default: False)
Return a copy if true. |
def create_mssql_pymssql(username, password, host, port, database, **kwargs):  # pragma: no cover
    """Create an engine connected to a mssql database using pymssql.

    Builds the pymssql connection URL from the supplied credentials and
    location, then passes it to ``create_engine`` along with any extra
    keyword arguments.
    """
    url = _create_mssql_pymssql(username, password, host, port, database)
    return create_engine(url, **kwargs)
def _init_read_gz(self):
"""Initialize for reading a gzip compressed fileobj.
"""
self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
self.dbuf = b""
# taken from gzip.GzipFile with some alterations
if self.__read(2) != b"\037\213":
raise ReadError("not ... | Initialize for reading a gzip compressed fileobj. |
def save_parameters(path, params=None):
"""Save all parameters into a file with the specified format.
Currently hdf5 and protobuf formats are supported.
Args:
path : path or file object
params (dict, optional): Parameters to be saved. Dictionary is of a parameter name (:obj:`str`) to :obj:`~nn... | Save all parameters into a file with the specified format.
Currently hdf5 and protobuf formats are supported.
Args:
path : path or file object
params (dict, optional): Parameters to be saved. Dictionary is of a parameter name (:obj:`str`) to :obj:`~nnabla.Variable`. |
def approxSimilarityJoin(self, datasetA, datasetB, threshold, distCol="distCol"):
"""
Join two datasets to approximately find all pairs of rows whose distance are smaller than
the threshold. If the :py:attr:`outputCol` is missing, the method will transform the data;
if the :py:attr:`outp... | Join two datasets to approximately find all pairs of rows whose distance are smaller than
the threshold. If the :py:attr:`outputCol` is missing, the method will transform the data;
if the :py:attr:`outputCol` exists, it will use that. This allows caching of the
transformed data when necessary.
... |
def reboot(name, **kwargs):
'''
Reboot a domain via ACPI request
:param vm_: domain name
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param p... | Reboot a domain via ACPI request
:param vm_: domain name
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param password: password to connect with, overr... |
def setup(name, path='log', enable_debug=False):
"""
Prepare a NestedSetup.
:param name: the channel name
:param path: the path where the logs will be written
:param enable_debug: do we want to save the message at the DEBUG level
:return a nested Setup
"""
path_tmpl = os.path.join(path... | Prepare a NestedSetup.
:param name: the channel name
:param path: the path where the logs will be written
:param enable_debug: do we want to save the message at the DEBUG level
:return a nested Setup |
def get(self, request, bot_id, id, format=None):
    """
    Get KikBot by id
    ---
    serializer: KikBotSerializer
    responseMessages:
        - code: 401
          message: Not authenticated
    """
    # NOTE: the docstring above is django-rest-swagger YAML metadata; it is
    # parsed to generate the API documentation, so its structure must be
    # preserved exactly.
    # Delegates entirely to the base class's detail-view implementation;
    # this override exists only to attach the documentation metadata.
    return super(KikBotDetail, self).get(request, bot_id, id, format)
def extract_python_dict_from_x509(x509):
"""
Extract a python dictionary similar to the return value of
:meth:`ssl.SSLSocket.getpeercert` from the given
:class:`OpenSSL.crypto.X509` `x509` object.
Note that by far not all attributes are included; only those required to
use :func:`ssl.match_host... | Extract a python dictionary similar to the return value of
:meth:`ssl.SSLSocket.getpeercert` from the given
:class:`OpenSSL.crypto.X509` `x509` object.
Note that by far not all attributes are included; only those required to
use :func:`ssl.match_hostname` are extracted and put in the result.
In th... |
def ax(self):
"""
The matplotlib axes that the visualizer draws upon (can also be a grid
of multiple axes objects). The visualizer automatically creates an
axes for the user if one has not been specified.
"""
if not hasattr(self, "_ax") or self._ax is None:
se... | The matplotlib axes that the visualizer draws upon (can also be a grid
of multiple axes objects). The visualizer automatically creates an
axes for the user if one has not been specified. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.