code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _create_tensor_summary(
name,
true_positive_counts,
false_positive_counts,
true_negative_counts,
false_negative_counts,
precision,
recall,
num_thresholds=None,
display_name=None,
description=None,
collections=None):
"""A private helper method for generating a tensor sum... | A private helper method for generating a tensor summary.
We use a helper method instead of having `op` directly call `raw_data_op`
to prevent the scope of `raw_data_op` from being embedded within `op`.
Arguments are the same as for raw_data_op.
Returns:
A tensor summary that collects data for PR curves. |
def is_sparse_vector(x):
""" x is a 2D sparse matrix with it's first shape equal to 1.
"""
return sp.issparse(x) and len(x.shape) == 2 and x.shape[0] == 1 | x is a 2D sparse matrix with it's first shape equal to 1. |
def create_config(case=None, Exp='Dummy', Type='Tor',
Lim=None, Bump_posextent=[np.pi/4., np.pi/4],
R=2.4, r=1., elong=0., Dshape=0.,
divlow=True, divup=True, nP=200,
out='object', SavePath='./'):
""" Create easily a tofu.geom.Config object
... | Create easily a tofu.geom.Config object
In tofu, a Config (short for geometrical configuration) refers to the 3D
geometry of a fusion device.
It includes, at least, a simple 2D polygon describing the first wall of the
fusion chamber, and can also include other structural elements (tiles,
limiters..... |
def remove_arrays(code, count=1):
"""removes arrays and replaces them with ARRAY_LVALS
returns new code and replacement dict
*NOTE* has to be called AFTER remove objects"""
res = ''
last = ''
replacements = {}
for e in bracket_split(code, ['[]']):
if e[0] == '[':
if... | removes arrays and replaces them with ARRAY_LVALS
returns new code and replacement dict
*NOTE* has to be called AFTER remove objects |
def _parse(self, infile):
"""Actually parse the config file."""
temp_list_values = self.list_values
if self.unrepr:
self.list_values = False
comment_list = []
done_start = False
this_section = self
maxline = len(infile) - 1
cur_index = -1
... | Actually parse the config file. |
def _format_issue(issue):
'''
Helper function to format API return information into a more manageable
and useful dictionary for issue information.
issue
The issue to format.
'''
ret = {'id': issue.get('id'),
'issue_number': issue.get('number'),
'state': issue.get('... | Helper function to format API return information into a more manageable
and useful dictionary for issue information.
issue
The issue to format. |
def set_seeds(self, seeds):
"""
Function for manual seed setting. Sets variable seeds and prepares
voxels for density model.
:param seeds: ndarray (0 - nothing, 1 - object, 2 - background,
3 - object just hard constraints, no model training, 4 - background
just hard cons... | Function for manual seed setting. Sets variable seeds and prepares
voxels for density model.
:param seeds: ndarray (0 - nothing, 1 - object, 2 - background,
3 - object just hard constraints, no model training, 4 - background
just hard constraints, no model training) |
def list_files(
client, fileshare, prefix, recursive, timeout=None, snapshot=None):
# type: (azure.storage.file.FileService, str, str, bool, int, str) ->
# azure.storage.file.models.File
"""List files in path
:param azure.storage.file.FileService client: file client
:param str filesha... | List files in path
:param azure.storage.file.FileService client: file client
:param str fileshare: file share
:param str prefix: path prefix
:param bool recursive: recursive
:param int timeout: timeout
:param str snapshot: snapshot
:rtype: azure.storage.file.models.File
:return: generato... |
def plotplanarPotentials(Pot,*args,**kwargs):
"""
NAME:
plotplanarPotentials
PURPOSE:
plot a planar potential
INPUT:
Rrange - range (can be Quantity)
xrange, yrange - if relevant (can be Quantity)
grid, gridx, gridy - number of points to plot
savefilenam... | NAME:
plotplanarPotentials
PURPOSE:
plot a planar potential
INPUT:
Rrange - range (can be Quantity)
xrange, yrange - if relevant (can be Quantity)
grid, gridx, gridy - number of points to plot
savefilename - save to or restore from this savefile (pickle)
... |
def sbo_case_insensitive(self):
"""Matching packages distinguish between uppercase and
lowercase for sbo repository
"""
if "--case-ins" in self.flag:
data = SBoGrep(name="").names()
data_dict = Utils().case_sensitive(data)
for key, value in data_dict.i... | Matching packages distinguish between uppercase and
lowercase for sbo repository |
def __get_pending_revisions(self):
"""
Get all the pending revisions after the current time
:return: A list of revisions
:rtype: list
"""
dttime = time.mktime(datetime.datetime.now().timetuple())
changes = yield self.revisions.find({
"toa" : {
... | Get all the pending revisions after the current time
:return: A list of revisions
:rtype: list |
def intranges_from_list(list_):
"""Represent a list of integers as a sequence of ranges:
((start_0, end_0), (start_1, end_1), ...), such that the original
integers are exactly those x such that start_i <= x < end_i for some i.
Ranges are encoded as single integers (start << 32 | end), not as tuples.
... | Represent a list of integers as a sequence of ranges:
((start_0, end_0), (start_1, end_1), ...), such that the original
integers are exactly those x such that start_i <= x < end_i for some i.
Ranges are encoded as single integers (start << 32 | end), not as tuples. |
def remove_non_ascii(input_string):
"""Remove non-ascii characters
Source: http://stackoverflow.com/a/1342373
"""
no_ascii = "".join(i for i in input_string if ord(i) < 128)
return no_ascii | Remove non-ascii characters
Source: http://stackoverflow.com/a/1342373 |
def _generate_base_anchors(base_size, scales, ratios):
"""
Generate anchor (reference) windows by enumerating aspect ratios X
scales wrt a reference (0, 0, 15, 15) window.
"""
base_anchor = np.array([1, 1, base_size, base_size]) - 1
ratio_anchors = AnchorGenerator._ratio_... | Generate anchor (reference) windows by enumerating aspect ratios X
scales wrt a reference (0, 0, 15, 15) window. |
def _get_firewall_policy(kwargs):
'''
Construct FirewallPolicy and FirewallPolicy instances from passed arguments
'''
fp_name = kwargs.get('name', None)
fp_description = kwargs.get('description', None)
firewallPolicy = FirewallPolicy(
name=fp_name,
description=fp_description
... | Construct FirewallPolicy and FirewallPolicy instances from passed arguments |
def init_properties(self) -> 'PygalleBaseClass':
""" Initialize the Pigalle properties.
# Returns:
PygalleBaseClass: The current instance.
"""
self._pigalle = {
PygalleBaseClass.__KEYS.INTERNALS: dict(),
PygalleBaseClass.__KEYS.PUBLIC: dict()
... | Initialize the Pigalle properties.
# Returns:
PygalleBaseClass: The current instance. |
def propagate_paths_and_modules(self, context, paths, modules):
"""
One size fits all method to ensure a target context has been preloaded
with a set of small files and Python modules.
"""
for path in paths:
self.propagate_to(context, mitogen.core.to_text(path))
... | One size fits all method to ensure a target context has been preloaded
with a set of small files and Python modules. |
def dict_deep_merge(tgt, src):
"""
Utility function to merge the source dictionary `src` to the target
dictionary recursively
Note:
The type of the values in the dictionary can only be `dict` or `list`
Parameters:
tgt (dict): The target dictionary
src (dict): The source dic... | Utility function to merge the source dictionary `src` to the target
dictionary recursively
Note:
The type of the values in the dictionary can only be `dict` or `list`
Parameters:
tgt (dict): The target dictionary
src (dict): The source dictionary |
def _ParseComment(self, structure):
"""Parses a comment.
Args:
structure (pyparsing.ParseResults): structure parsed from the log file.
"""
if structure[1] == 'Date:':
self._year, self._month, self._day_of_month, _, _, _ = structure.date_time
elif structure[1] == 'Fields:':
self._P... | Parses a comment.
Args:
structure (pyparsing.ParseResults): structure parsed from the log file. |
def get_proxy_session(self):
"""Gets a ``ProxySession`` which is responsible for acquiring authentication credentials on behalf of a service client.
:return: a proxy session for this service
:rtype: ``osid.proxy.ProxySession``
:raise: ``OperationFailed`` -- unable to complete request
... | Gets a ``ProxySession`` which is responsible for acquiring authentication credentials on behalf of a service client.
:return: a proxy session for this service
:rtype: ``osid.proxy.ProxySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``sup... |
def element_wise(self, func, *args, **kwargs):
"""Apply a function to each matrix element and return the result in a
new operator matrix of the same shape.
Args:
func (FunctionType): A function to be applied to each element. It
must take the element as its first argu... | Apply a function to each matrix element and return the result in a
new operator matrix of the same shape.
Args:
func (FunctionType): A function to be applied to each element. It
must take the element as its first argument.
args: Additional positional arguments to... |
def parse_set(string):
"""Parse set from comma separated string."""
string = string.strip()
if string:
return set(string.split(","))
else:
return set() | Parse set from comma separated string. |
def pack_column_flat(self, value, components=None, offset=False):
"""
TODO: add documentation
"""
if components:
if isinstance(components, str):
components = [components]
elif isinstance(components, list):
components = components
... | TODO: add documentation |
def ilxSearches(self,
ilx_ids=None,
LIMIT=25,
_print=True,
crawl=False):
"""parameters( data = "list of ilx_ids" )"""
url_base = self.base_url + "/api/1/ilx/search/identifier/{identifier}?key={APIKEY}"
urls = [url_ba... | parameters( data = "list of ilx_ids" ) |
def show_xys(self, xs, ys)->None:
"Show the `xs` (inputs) and `ys` (targets)."
from IPython.display import display, HTML
items,names = [], xs[0].names + ['target']
for i, (x,y) in enumerate(zip(xs,ys)):
res = []
cats = x.cats if len(x.cats.size()) > 0 else []
... | Show the `xs` (inputs) and `ys` (targets). |
def findLowest(self, symorders):
"""Find the position of the first lowest tie in a
symorder or -1 if there are no ties"""
_range = range(len(symorders))
stableSymorders = map(None, symorders, _range)
# XXX FIX ME
# Do I need to sort?
stableSymorders.sort()
... | Find the position of the first lowest tie in a
symorder or -1 if there are no ties |
def easeOutElastic(n, amplitude=1, period=0.3):
"""An elastic tween function that overshoots the destination and then "rubber bands" into the destination.
Args:
n (float): The time progress, starting at 0.0 and ending at 1.0.
Returns:
(float) The line progress, starting at 0.0 and ending at 1.... | An elastic tween function that overshoots the destination and then "rubber bands" into the destination.
Args:
n (float): The time progress, starting at 0.0 and ending at 1.0.
Returns:
(float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine(). |
def _remove_mapper_from_plotter(plotter, actor, reset_camera):
"""removes this actor's mapper from the given plotter's _scalar_bar_mappers"""
try:
mapper = actor.GetMapper()
except AttributeError:
return
for name in list(plotter._scalar_bar_mappers.keys()):
try:
plott... | removes this actor's mapper from the given plotter's _scalar_bar_mappers |
def plot(self, *args, **kwargs):
"""
Plot latent space X in 1D:
See GPy.plotting.matplot_dep.variational_plots
"""
import sys
assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
from ...plotting.matplot_dep import variational_plots
... | Plot latent space X in 1D:
See GPy.plotting.matplot_dep.variational_plots |
def _update_zipimporter_cache(normalized_path, cache, updater=None):
"""
Update zipimporter cache data for a given normalized path.
Any sub-path entries are processed as well, i.e. those corresponding to zip
archives embedded in other zip archives.
Given updater is a callable taking a cache entry ... | Update zipimporter cache data for a given normalized path.
Any sub-path entries are processed as well, i.e. those corresponding to zip
archives embedded in other zip archives.
Given updater is a callable taking a cache entry key and the original entry
(after already removing the entry from the cache),... |
def required_max_memory(cls, id, memory):
"""
Recommend a max_memory setting for this vm given memory. If the
VM already has a nice setting, return None. The max_memory
param cannot be fixed too high, because page table allocation
would cost too much for small memory profile. Use... | Recommend a max_memory setting for this vm given memory. If the
VM already has a nice setting, return None. The max_memory
param cannot be fixed too high, because page table allocation
would cost too much for small memory profile. Use a range as below. |
def average_sources(source_encoded: mx.sym.Symbol, source_encoded_length: mx.sym.Symbol) -> mx.nd.NDArray:
"""
Calculate the average of encoded sources taking into account their lengths.
:param source_encoded: Encoder representation for n elements. Shape: (n, source_encoded_length, hidden_size)... | Calculate the average of encoded sources taking into account their lengths.
:param source_encoded: Encoder representation for n elements. Shape: (n, source_encoded_length, hidden_size).
:param source_encoded_length: A vector of encoded sequence lengths. Shape: (n,).
:return: Average vectors. Sh... |
def get_header(self, service_id, version_number, name):
"""Retrieves a Header object by name."""
content = self._fetch("/service/%s/version/%d/header/%s" % (service_id, version_number, name))
return FastlyHeader(self, content) | Retrieves a Header object by name. |
def u2i(uint32):
"""
Converts a 32 bit unsigned number to signed.
uint32:= an unsigned 32 bit number
...
print(u2i(4294967272))
-24
print(u2i(37))
37
...
"""
mask = (2 ** 32) - 1
if uint32 & (1 << 31):
v = uint32 | ~mask
else:
v = uint32 & mask
r... | Converts a 32 bit unsigned number to signed.
uint32:= an unsigned 32 bit number
...
print(u2i(4294967272))
-24
print(u2i(37))
37
... |
def value_equality(cls: type = None,
*,
unhashable: bool = False,
distinct_child_types: bool = False,
manual_cls: bool = False,
approximate: bool = False
) -> Union[Callable[[type], type], type]:
"""Impl... | Implements __eq__/__ne__/__hash__ via a _value_equality_values_ method.
_value_equality_values_ is a method that the decorated class must implement.
_value_equality_approximate_values_ is a method that the decorated class
might implement if special support for approximate equality is required.
This is... |
def _output_format(cls, func, override=None):
""" Decorator in charge of giving the output its right format, either
json or pandas
Keyword Arguments:
func: The function to be decorated
override: Override the internal format of the call, default None
"""
... | Decorator in charge of giving the output its right format, either
json or pandas
Keyword Arguments:
func: The function to be decorated
override: Override the internal format of the call, default None |
def build_from_node(package, node):
"""
Compile a Quilt data package from an existing package node.
"""
team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
_check_team_id(team)
store = PackageStore()
pkg_root = get_or_create_package(store, team, owner, pkg, subpath)
... | Compile a Quilt data package from an existing package node. |
def _start_server(self, *args):
"""Run the node local server"""
self.log("Starting server", args)
secure = self.certificate is not None
if secure:
self.log("Running SSL server with cert:", self.certificate)
else:
self.log("Running insecure server without ... | Run the node local server |
def add_albumart(albumart, song_title):
'''
Adds the album art to the song
'''
try:
img = urlopen(albumart) # Gets album art from url
except Exception:
log.log_error("* Could not add album art", indented=True)
return None
audio = EasyMP3(song_title, ID3=ID3)
try:
... | Adds the album art to the song |
def fill_luis_event_properties(
self,
recognizer_result: RecognizerResult,
turn_context: TurnContext,
telemetry_properties: Dict[str, str] = None,
) -> Dict[str, str]:
"""Fills the event properties for LuisResult event for telemetry.
These properties are logged when t... | Fills the event properties for LuisResult event for telemetry.
These properties are logged when the recognizer is called.
:param recognizer_result: Last activity sent from user.
:type recognizer_result: RecognizerResult
:param turn_context: Context object containing information ... |
def default_metadata_db_path():
"""Helper to get the default path for the metadata file.
:returns: The path to where the default location of the metadata
database is. Maps to which is ~/.inasafe.metadata35.db
:rtype: str
"""
home = expanduser("~")
home = os.... | Helper to get the default path for the metadata file.
:returns: The path to where the default location of the metadata
database is. Maps to which is ~/.inasafe.metadata35.db
:rtype: str |
def human_filesize(i):
"""
'human-readable' file size (i.e. 13 KB, 4.1 MB, 102 bytes, etc).
"""
bytes = float(i)
if bytes < 1024:
return u"%d Byte%s" % (bytes, bytes != 1 and u"s" or u"")
if bytes < 1024 * 1024:
return u"%.1f KB" % (bytes / 1024)
if bytes < 1024 * 1024 * 1024... | 'human-readable' file size (i.e. 13 KB, 4.1 MB, 102 bytes, etc). |
def getParameterByName(self, name):
"""Searchs a parameter by name and returns it."""
result = None
for parameter in self.getParameters():
nameParam = parameter.getName()
if nameParam == name:
result = parameter
break
return result | Searchs a parameter by name and returns it. |
def meff_lh_110(self, **kwargs):
'''
Returns the light-hole band effective mass in the [110] direction,
meff_lh_110, in units of electron mass.
'''
return 2. / (2 * self.luttinger1(**kwargs) + self.luttinger2(**kwargs)
+ 3 * self.luttinger3(**kwargs)) | Returns the light-hole band effective mass in the [110] direction,
meff_lh_110, in units of electron mass. |
def _load_relation(self, models, name, constraints):
"""
Eagerly load the relationship on a set of models.
:rtype: list
"""
relation = self.get_relation(name)
relation.add_eager_constraints(models)
if callable(constraints):
constraints(relation)
... | Eagerly load the relationship on a set of models.
:rtype: list |
def intrinsics_multi_constructor(loader, tag_prefix, node):
"""
YAML constructor to parse CloudFormation intrinsics.
This will return a dictionary with key being the instrinsic name
"""
# Get the actual tag name excluding the first exclamation
tag = node.tag[1:]
# Some intrinsic functions ... | YAML constructor to parse CloudFormation intrinsics.
This will return a dictionary with key being the instrinsic name |
def unArrayify(self, gene):
"""
Copies gene bias values and weights to network bias values and
weights.
"""
g = 0
# if gene is too small an IndexError will be thrown
for layer in self.layers:
if layer.type != 'Input':
for i in range(lay... | Copies gene bias values and weights to network bias values and
weights. |
def age_simulants(self, event: Event):
"""Updates simulant age on every time step.
Parameters
----------
event :
An event object emitted by the simulation containing an index
representing the simulants affected by the event and timing
information.
... | Updates simulant age on every time step.
Parameters
----------
event :
An event object emitted by the simulation containing an index
representing the simulants affected by the event and timing
information. |
def multiple_outputs_from_file(cls, filename, keep_sub_files=True):
"""
Parses a QChem output file with multiple calculations
1.) Seperates the output into sub-files
e.g. qcout -> qcout.0, qcout.1, qcout.2 ... qcout.N
a.) Find delimeter for multiple calcua... | Parses a QChem output file with multiple calculations
1.) Seperates the output into sub-files
e.g. qcout -> qcout.0, qcout.1, qcout.2 ... qcout.N
a.) Find delimeter for multiple calcualtions
b.) Make seperate output sub-files
2.) Creates seperate Q... |
def p_x_commalist(self,t):
"""commalist : commalist ',' expression
| expression
"""
if len(t) == 2: t[0] = CommaX([t[1]])
elif len(t) == 4: t[0] = CommaX(t[1].children+[t[3]])
else: raise NotImplementedError('unk_len',len(t)) # pragma: no cover | commalist : commalist ',' expression
| expression |
def _assert_refspec(self):
"""Turns out we can't deal with remotes if the refspec is missing"""
config = self.config_reader
unset = 'placeholder'
try:
if config.get_value('fetch', default=unset) is unset:
msg = "Remote '%s' has no refspec set.\n"
... | Turns out we can't deal with remotes if the refspec is missing |
def get_overlapping_ranges(self, collection_link, sorted_ranges):
'''
Given the sorted ranges and a collection,
Returns the list of overlapping partition key ranges
:param str collection_link:
The collection link.
:param (list of routing_range._Range) sorted_... | Given the sorted ranges and a collection,
Returns the list of overlapping partition key ranges
:param str collection_link:
The collection link.
:param (list of routing_range._Range) sorted_ranges: The sorted list of non-overlapping ranges.
:return:
List o... |
def fail(self, err='MockupDB query failure', *args, **kwargs):
"""Reply to a query with the QueryFailure flag and an '$err' key.
Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
"""
kwargs.setdefault('flags', 0)
kwargs['flags'] |= REPLY_FLAGS['QueryFailure'... | Reply to a query with the QueryFailure flag and an '$err' key.
Returns True so it is suitable as an `~MockupDB.autoresponds` handler. |
def save_current_nb_as_html(info=False):
"""
Save the current notebook as html file in the same directory
"""
assert in_ipynb()
full_path = get_notebook_name()
path, filename = os.path.split(full_path)
wd_save = os.getcwd()
os.chdir(path)
cmd = 'jupyter nbconvert --to html "{}"'.fo... | Save the current notebook as html file in the same directory |
def convert_command_output(*command):
"""
Command line interface for ``coloredlogs --to-html``.
Takes a command (and its arguments) and runs the program under ``script``
(emulating an interactive terminal), intercepts the output of the command
and converts ANSI escape sequences in the output to HTM... | Command line interface for ``coloredlogs --to-html``.
Takes a command (and its arguments) and runs the program under ``script``
(emulating an interactive terminal), intercepts the output of the command
and converts ANSI escape sequences in the output to HTML. |
def __send_static_file(self, path=None):
"""
Send apidoc files from the apidoc folder to the browser.
:param path: the apidoc file.
"""
if not path:
path = 'index.html'
file_name = join(self.folder_path, path)
# the api_project.js has the absolute u... | Send apidoc files from the apidoc folder to the browser.
:param path: the apidoc file. |
def parse_declaration_expressn_fncall_SUBparams(self, params):
"""
Needs rearrangement:
0 1 2
WDL native params: sub(input, pattern, replace)
1 2 0
Python's re.sub() params: sub(pattern, replace, input)
:param params:
:param es:
:return:
... | Needs rearrangement:
0 1 2
WDL native params: sub(input, pattern, replace)
1 2 0
Python's re.sub() params: sub(pattern, replace, input)
:param params:
:param es:
:return: |
def auth_stage2(self,stanza):
"""Handle the first stage authentication response (result of the <iq
type="get"/>).
[client only]"""
self.lock.acquire()
try:
self.__logger.debug("Procesing auth response...")
self.available_auth_methods=[]
if (st... | Handle the first stage authentication response (result of the <iq
type="get"/>).
[client only] |
def get_field(hologram, sideband=+1, filter_name="disk", filter_size=1 / 3,
subtract_mean=True, zero_pad=True, copy=True):
"""Compute the complex field from a hologram using Fourier analysis
Parameters
----------
hologram: real-valued 2d ndarray
hologram data
sideband: +1, -1,... | Compute the complex field from a hologram using Fourier analysis
Parameters
----------
hologram: real-valued 2d ndarray
hologram data
sideband: +1, -1, or tuple of (float, float)
specifies the location of the sideband:
- +1: sideband in the upper half in Fourier space,
... |
def get_as_string(self, s3_path, encoding='utf-8'):
"""
Get the contents of an object stored in S3 as string.
:param s3_path: URL for target S3 location
:param encoding: Encoding to decode bytes to string
:return: File contents as a string
"""
content = self.get_... | Get the contents of an object stored in S3 as string.
:param s3_path: URL for target S3 location
:param encoding: Encoding to decode bytes to string
:return: File contents as a string |
def query_struct(self, name):
"""Query struct."""
sql = 'select id, file_id, name from code_items '\
'where name = ?'
self.cursor.execute(sql, (name,))
for i in self.cursor.fetchall():
sql = 'select id, type, name from code_items ' \
'where par... | Query struct. |
def _check_pending(self, tag, match_func=None):
"""Check the pending_events list for events that match the tag
:param tag: The tag to search for
:type tag: str
:param tags_regex: List of re expressions to search for also
:type tags_regex: list[re.compile()]
:return:
... | Check the pending_events list for events that match the tag
:param tag: The tag to search for
:type tag: str
:param tags_regex: List of re expressions to search for also
:type tags_regex: list[re.compile()]
:return: |
def _obj_index(self, uri, base_path, marked_path, headers, spr=False):
"""Return an index of objects from within the container.
:param uri:
:param base_path:
:param marked_path:
:param headers:
:param spr: "single page return" Limit the returned data to one page
... | Return an index of objects from within the container.
:param uri:
:param base_path:
:param marked_path:
:param headers:
:param spr: "single page return" Limit the returned data to one page
:type spr: ``bol``
:return: |
def _deserialization_helper(self, state, ray_forking):
"""This is defined in order to make pickling work.
Args:
state: The serialized state of the actor handle.
ray_forking: True if this is being called because Ray is forking
the actor handle and false if it is b... | This is defined in order to make pickling work.
Args:
state: The serialized state of the actor handle.
ray_forking: True if this is being called because Ray is forking
the actor handle and false if it is being called by pickling. |
def main(pub_port=None, sub_port=None):
'''main of forwarder
:param sub_port: port for subscribers
:param pub_port: port for publishers
'''
try:
if sub_port is None:
sub_port = get_sub_port()
if pub_port is None:
pub_port = get_pub_port()
context = zm... | main of forwarder
:param sub_port: port for subscribers
:param pub_port: port for publishers |
def build_sector_fundamentals(sector):
'''
In this method, for the given sector, we'll get the data we need for each stock
in the sector from IEX. Once we have the data, we'll check that the earnings
reports meet our criteria with `eps_good()`. We'll put stocks that meet those
requirements into a da... | In this method, for the given sector, we'll get the data we need for each stock
in the sector from IEX. Once we have the data, we'll check that the earnings
reports meet our criteria with `eps_good()`. We'll put stocks that meet those
requirements into a dataframe along with all the data about them we'll ne... |
def tags(self):
"""The tags property.
Returns:
(hash). the property value. (defaults to: {})
"""
if 'tags' in self._values:
return self._values['tags']
self._values['tags'] = copy.deepcopy(self._defaults['tags'])
return self._values['tags'... | The tags property.
Returns:
(hash). the property value. (defaults to: {}) |
def submit(self, command='sleep 1', blocksize=1, job_name="parsl.auto"):
"""Submit command to an Azure instance.
Submit returns an ID that corresponds to the task that was just submitted.
Parameters
----------
command : str
Command to be invoked on the remote side.
... | Submit command to an Azure instance.
Submit returns an ID that corresponds to the task that was just submitted.
Parameters
----------
command : str
Command to be invoked on the remote side.
blocksize : int
Number of blocks requested.
job_name : s... |
def _children(self):
"""Yield all direct children of this object."""
if isinstance(self.condition, CodeExpression):
yield self.condition
for codeobj in self.body._children():
yield codeobj
for codeobj in self.else_body._children():
yield codeobj | Yield all direct children of this object. |
def is_false(self, e, extra_constraints=(), solver=None, model_callback=None): #pylint:disable=unused-argument
"""
Should return True if e can be easily found to be False.
:param e: The AST
:param extra_constraints: Extra constraints (as ASTs) to add to the solver fo... | Should return True if e can be easily found to be False.
:param e: The AST
:param extra_constraints: Extra constraints (as ASTs) to add to the solver for this solve.
:param solver: A solver, for backends that require it
:param model_callback: a func... |
def setup_simulation(components: List, input_config: Mapping=None,
plugin_config: Mapping=None) -> InteractiveContext:
"""Construct a simulation from a list of components and call its setup
method.
Parameters
----------
components
A list of initialized simulation compon... | Construct a simulation from a list of components and call its setup
method.
Parameters
----------
components
A list of initialized simulation components. Corresponds to the
components block of a model specification.
input_config
A nested dictionary with any additional simula... |
def stdout():
"""
Returns the stdout as a byte stream in a Py2/PY3 compatible manner
Returns
-------
io.BytesIO
Byte stream of Stdout
"""
# We write all of the data to stdout with bytes, typically io.BytesIO. stdout in Python2
# accepts bytes but Python3 does not. This is due t... | Returns the stdout as a byte stream in a Py2/PY3 compatible manner
Returns
-------
io.BytesIO
Byte stream of Stdout |
def get_unique_named_object(root, name):
"""
retrieves a unique named object (no fully qualified name)
Args:
root: start of search
name: name of object
Returns:
the object (if not unique, raises an error)
"""
a = get_children(lambda x: hasattr(x, 'name') and x.name == n... | retrieves a unique named object (no fully qualified name)
Args:
root: start of search
name: name of object
Returns:
the object (if not unique, raises an error) |
def to_browser_mode(self):
""" Write all the messages to files and open them in the browser """
for message_no in range(len(self.messages)):
self.__to_browser(message_no) | Write all the messages to files and open them in the browser |
def fetch_chain(self, certr, max_length=10):
"""
Fetch the intermediary chain for a certificate.
:param acme.messages.CertificateResource certr: The certificate to
fetch the chain for.
:param int max_length: The maximum length of the chain that will be
fetched.
... | Fetch the intermediary chain for a certificate.
:param acme.messages.CertificateResource certr: The certificate to
fetch the chain for.
:param int max_length: The maximum length of the chain that will be
fetched.
:rtype: Deferred[List[`acme.messages.CertificateResource`... |
def Nu_Xu(Re, Pr, rho_w=None, rho_b=None, mu_w=None, mu_b=None):
r'''Calculates internal convection Nusselt number for turbulent vertical
upward flow in a pipe under supercritical conditions according to [1]_.
.. math::
Nu_b = 0.02269 Re_b^{0.8079} \bar{Pr}_b^{0.9213}
\left(\frac{\r... | r'''Calculates internal convection Nusselt number for turbulent vertical
upward flow in a pipe under supercritical conditions according to [1]_.
.. math::
Nu_b = 0.02269 Re_b^{0.8079} \bar{Pr}_b^{0.9213}
\left(\frac{\rho_w}{\rho_b}\right)^{0.6638}
\left(\frac{\mu_w}{\mu_b}\right... |
def get_end_date_metadata(self):
"""Gets the metadata for an end date.
return: (osid.Metadata) - metadata for the date
*compliance: mandatory -- This method must be implemented.*
"""
metadata = dict(self._mdata['end_date'])
metadata.update({'existing_date_time_values': ... | Gets the metadata for an end date.
return: (osid.Metadata) - metadata for the date
*compliance: mandatory -- This method must be implemented.* |
def get_authors_by_keyword(keyword: str, graph=None, authors=None) -> Set[str]:
"""Get authors for whom the search term is a substring.
:param pybel.BELGraph graph: A BEL graph
:param keyword: The keyword to search the author strings for
:param set[str] authors: An optional set of pre-cached author... | Get authors for whom the search term is a substring.
:param pybel.BELGraph graph: A BEL graph
:param keyword: The keyword to search the author strings for
:param set[str] authors: An optional set of pre-cached authors calculated from the graph
:return: A set of authors with the keyword as a substri... |
def recent_update_frequencies(self):
""" Returns the 10 most recent update frequencies.
The given frequencies are computed as short-term frequencies!
The 0th element of the list corresponds to the most recent frequency.
"""
return list(reversed([(1.0 / p) for p in numpy.diff(sel... | Returns the 10 most recent update frequencies.
The given frequencies are computed as short-term frequencies!
The 0th element of the list corresponds to the most recent frequency. |
def execute(sql, args=None, key='default'):
"""It is used for update, delete records.
:param sql string: the sql statement like 'select * from %s'
:param args list: When set to None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it shuld be tuple or lis... | It is used for update, delete records.
:param sql string: the sql statement like 'select * from %s'
:param args list: When set to None, will use dbi execute(sql), else
dbi execute(sql, args), the args keep the original rules, it should be tuple or list of list
:param key: a key for your dabtabase ... |
def mtf_bitransformer_all_layers_tiny():
"""Test out all the layers on local CPU."""
hparams = mtf_bitransformer_tiny()
hparams.moe_num_experts = 4
hparams.moe_expert_x = 4
hparams.moe_expert_y = 4
hparams.moe_hidden_size = 512
hparams.encoder_layers = [
"self_att", "local_self_att", "moe_1d", "moe_... | Test out all the layers on local CPU. |
def add_tar_opts (cmdlist, compression, verbosity):
"""Add tar options to cmdlist."""
progname = os.path.basename(cmdlist[0])
if compression == 'gzip':
cmdlist.append('-z')
elif compression == 'compress':
cmdlist.append('-Z')
elif compression == 'bzip2':
cmdlist.append('-j')
... | Add tar options to cmdlist. |
def _modules_to_main(modList):
"""Force every module in modList to be placed into main"""
if not modList:
return
main = sys.modules['__main__']
for modname in modList:
if isinstance(modname, str):
try:
mod = __import__(modname)
except Exception:
sys.stderr.write(
... | Force every module in modList to be placed into main |
def put(request, obj_id=None):
"""Adds tags from objects resolved from guids
:param tags: Tags to add
:type tags: list
:param guids: Guids to add tags from
:type guids: list
:returns: json
"""
res = Result()
data = request.PUT or json.loads(request.body)['body']
if obj_id:
... | Adds tags from objects resolved from guids
:param tags: Tags to add
:type tags: list
:param guids: Guids to add tags from
:type guids: list
:returns: json |
def _scaleTo8bit(self, img):
'''
The pattern comparator need images to be 8 bit
-> find the range of the signal and scale the image
'''
r = scaleSignalCutParams(img, 0.02) # , nSigma=3)
self.signal_ranges.append(r)
return toUIntArray(img, dtype=np.uint8, r... | The pattern comparator need images to be 8 bit
-> find the range of the signal and scale the image |
def loadedfields(self):
'''Generator of fields loaded from database'''
if self._loadedfields is None:
for field in self._meta.scalarfields:
yield field
else:
fields = self._meta.dfields
processed = set()
for name in self._loadedfiel... | Generator of fields loaded from database |
def _add_timedelta(self, delta):
"""
Add timedelta duration to the instance.
:param delta: The timedelta instance
:type delta: pendulum.Duration or datetime.timedelta
:rtype: DateTime
"""
if isinstance(delta, pendulum.Period):
return self.add(
... | Add timedelta duration to the instance.
:param delta: The timedelta instance
:type delta: pendulum.Duration or datetime.timedelta
:rtype: DateTime |
def write_to(self, group, append=False):
"""Write the data to the given group.
:param h5py.Group group: The group to write the data on. It is
assumed that the group is already existing or initialized
to store h5features data (i.e. the method
``Data.init_group`` have ... | Write the data to the given group.
:param h5py.Group group: The group to write the data on. It is
assumed that the group is already existing or initialized
to store h5features data (i.e. the method
``Data.init_group`` have been called.
:param bool append: If False, ... |
def add_error(self, txt):
"""Add a message in the configuration errors list so we can print them
all in one place
Set the object configuration as not correct
:param txt: error message
:type txt: str
:return: None
"""
self.configuration_errors.append(tx... | Add a message in the configuration errors list so we can print them
all in one place
Set the object configuration as not correct
:param txt: error message
:type txt: str
:return: None |
def get_did_providers(self, did):
"""
Return the list providers registered on-chain for the given did.
:param did: hex str the id of an asset on-chain
:return:
list of addresses
None if asset has no registered providers
"""
register_values = self.c... | Return the list providers registered on-chain for the given did.
:param did: hex str the id of an asset on-chain
:return:
list of addresses
None if asset has no registered providers
def tpu_conv1d(inputs, filters, kernel_size, padding="SAME", name="tpu_conv1d"):
"""Version of conv1d that works on TPU (as of 11/2017).
Args:
inputs: a Tensor with shape [batch, length, input_depth].
filters: an integer.
kernel_size: an integer.
padding: a string - "SAME" or "LEFT".
name: a st... | Version of conv1d that works on TPU (as of 11/2017).
Args:
inputs: a Tensor with shape [batch, length, input_depth].
filters: an integer.
kernel_size: an integer.
padding: a string - "SAME" or "LEFT".
name: a string.
Returns:
a Tensor with shape [batch, length, filters]. |
def get_address(name, hash, db, target=None):
'''
fetches the contract address of deployment
:param hash: the contract file hash
:return: (string) address of the contract
error, if any
'''
key = DB.pkey([EZO.DEPLOYED, name, target, hash])
d, er... | fetches the contract address of deployment
:param hash: the contract file hash
:return: (string) address of the contract
error, if any |
def decode_body(cls, header, f):
"""Generates a `MqttPingreq` packet given a
`MqttFixedHeader`. This method asserts that header.packet_type
is `pingreq`.
Parameters
----------
header: MqttFixedHeader
f: file
Object with a read method.
Raises... | Generates a `MqttPingreq` packet given a
`MqttFixedHeader`. This method asserts that header.packet_type
is `pingreq`.
Parameters
----------
header: MqttFixedHeader
f: file
Object with a read method.
Raises
------
DecodeError
... |
def list(self, *kinds, **kwargs):
"""Returns a list of inputs that are in the :class:`Inputs` collection.
You can also filter by one or more input kinds.
This function iterates over all possible inputs, regardless of any arguments you
specify. Because the :class:`Inputs` collection is t... | Returns a list of inputs that are in the :class:`Inputs` collection.
You can also filter by one or more input kinds.
This function iterates over all possible inputs, regardless of any arguments you
specify. Because the :class:`Inputs` collection is the union of all the inputs of each
ki... |
def wunique_(self, col):
"""
Weight unique values: returns a dataframe with a count
of unique values
"""
try:
s = pd.value_counts(self.df[col].values)
df = pd.DataFrame(s, columns=["Number"])
return df
except Exception as e:
... | Weight unique values: returns a dataframe with a count
of unique values |
def all_phrase_translations(phrase):
'''
Return the set of translations for all possible words in a full
phrase. Chinese is sometimes ambiguous. We do not attempt to
disambiguate, or handle unknown letters especially well. Full
parsing is left to upstream logic.
'''
if not trees:
ini... | Return the set of translations for all possible words in a full
phrase. Chinese is sometimes ambiguous. We do not attempt to
disambiguate, or handle unknown letters especially well. Full
parsing is left to upstream logic. |
def _hue(color, **kwargs):
    """Return the hue component of *color*, in degrees (0-360), as a NumberValue."""
    # Normalize the 8-bit RGB channels to [0, 1] before converting to HLS.
    rgb = [channel / 255.0 for channel in color.value[:3]]
    hue, _lightness, _saturation = colorsys.rgb_to_hls(*rgb)
    return NumberValue(hue * 360.0)
def dependent_on_composite_state(self):
""" method iterates over all nodes that provide dependency to the current node,
and compile composite state of them all
:return instance of <NodesCompositeState>
"""
composite_state = NodesCompositeState()
for dependent_on ... | method iterates over all nodes that provide dependency to the current node,
and compile composite state of them all
:return instance of <NodesCompositeState> |
def get_year_start(day=None):
    """Return January 1 of the year containing *day*.

    :param day: reference date; defaults to today's date when None
    :return: the timezone-adjusted date with month and day reset to 1
    """
    # NOTE(review): assumes add_timezone returns a date/datetime supporting
    # replace(month=..., day=...) — confirm against its definition.
    day = add_timezone(day or datetime.date.today())
    # A single replace() call avoids building an intermediate object.
    return day.replace(month=1, day=1)
def _construct_key(self, rule_id: str, spacy_rule_id:int) -> int:
"""
Use a mapping to store the information about rule_id for each matches, create the mapping key here
Args:
rule_id: str
spacy_rule_id:int
Returns: int
"""
hash_key = (rule_id, sp... | Use a mapping to store the information about rule_id for each matches, create the mapping key here
Args:
rule_id: str
spacy_rule_id:int
Returns: int |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.