| Unnamed: 0 (int64, 0–389k) | code (string, 26–79.6k chars) | docstring (string, 1–46.9k chars) |
|---|---|---|
0 | def format_exp_floats(decimals):
threshold = 10 ** 5
return (
lambda n: "{:.{prec}e}".format(n, prec=decimals) if n > threshold else "{:4.{prec}f}".format(n, prec=decimals)
) | sometimes the exp. column can be too large |
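A quick usage sketch of `format_exp_floats` from the row above (note the strict `>`: a value of exactly `10 ** 5` still prints fixed-point):

```python
fmt = format_exp_floats(decimals=2)
print(fmt(123.456))    # '123.46'  -- fixed-point at or below 1e5
print(fmt(1234567.0))  # '1.23e+06' -- scientific notation above the threshold
```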
1 | async def execute(self, coro, name, user, info=None):
task = self.schedCoro(coro)
return await s_task.Task.anit(self, task, name, user, info=info) | Create a synapse task from the given coroutine. |
2 | def pinch(self, direction='in', percent=0.6, duration=2.0, dead_zone=0.1):
    if direction not in ('in', 'out'):
        raise ValueError('Argument `direction` should be one of "in" or "out". Got {}'.format(repr(direction)))
    if dead_zone >= percent:
        raise ValueError(
            'Argument `dead_zone` should not be greater than `percent`. Got dead_zone={}, percent={}'.format(repr(dead_zone), repr(percent)))
w, h = s... | Squeezing or expanding 2 fingers on this UI with given motion range and duration.
Args:
direction (:py:obj:`str`): pinching direction, only "in" or "out". "in" for squeezing, "out" for expanding
percent (:py:obj:`float`): squeezing range from or expanding range to of the bounds of the U... |
3 | def on_log(self):
def decorator(handler):
self.client.on_log = handler
return handler
return decorator | Decorate a callback function to handle MQTT logging.
**Example Usage:**
::
@mqtt.on_log()
def handle_logging(client, userdata, level, buf):
print(client, userdata, level, buf) |
4 | def fixminimized(self, alphabet):
insymbols = fst.SymbolTable()
outsymbols = fst.SymbolTable()
num = 1
for char in self.alphabet:
self.isyms.__setitem__(char, num)
self.osyms.__setitem__(char, num)
insymbols.add_symbol(char, num)
... | After pyfst minimization,
all unused arcs are removed,
and all sink states are removed.
However this may break compatibility.
Args:
alphabet (list): The input alphabet
Returns:
None |
5 | def predict_proba(self, time):
check_is_fitted(self, "unique_time_")
time = check_array(time, ensure_2d=False)
extends = time > self.unique_time_[-1]
if self.prob_[-1] > 0 and extends.any():
raise ValueError("time must be smaller than largest "
... | Return probability of an event after given time point.
:math:`\\hat{S}(t) = P(T > t)`
Parameters
----------
time : array, shape = (n_samples,)
Time to estimate probability at.
Returns
-------
prob : array, shape = (n_samples,)
Probabilit... |
6 | def classify(self, token_type, value, lineno, column, line):
if token_type == self.grammar.KEYWORD_TOKEN:
label_index = self.grammar.keyword_ids.get(value, -1)
if label_index != -1:
return label_index
label_index = self.grammar.token_ids.get(token_type, -... | Find the label for a token. |
7 | def phi_vector(self):
weights = self.pst.observation_data.loc[self.names,"weight"]
obsval = self.pst.observation_data.loc[self.names,"obsval"]
phi_vec = []
for idx in self.index.values:
simval = self.loc[idx,self.names]
phi = (((simval - obsval) * weights... | property decorated method to get a vector of L2 norm (phi)
for the realizations. The ObservationEnsemble.pst.weights can be
updated prior to calling this method to evaluate new weighting strategies
Return
------
pandas.DataFrame : pandas.DataFrame |
8 | def nrmse_iqr(simulated_array, observed_array, replace_nan=None, replace_inf=None,
remove_neg=False, remove_zero=False):
simulated_array, observed_array = treat_values(
simulated_array,
observed_array,
replace_nan=replace_nan,
replace_inf=replace_inf,
... | Compute the IQR normalized root mean square error between the simulated and observed data.
.. image:: /pictures/NRMSE_IQR.png
**Range:** 0 ≤ NRMSE < inf.
**Notes:** This metric is the RMSE normalized by the interquartile range of the observed time
series (x). Normalizing allows comparison between dat... |
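A minimal NumPy sketch of the normalization the docstring describes, leaving out the library's `treat_values` preprocessing (`nrmse_iqr_sketch` is a hypothetical name, not the row's implementation):

```python
import numpy as np

def nrmse_iqr_sketch(simulated, observed):
    # RMSE divided by the interquartile range of the observed series
    rmse = np.sqrt(np.mean((simulated - observed) ** 2))
    q75, q25 = np.percentile(observed, [75, 25])
    return rmse / (q75 - q25)
```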
9 | def _updatePoolingState(self, activeColWithPredictedInput, fractionUnpredicted):
if fractionUnpredicted > self._poolingThreshUnpredicted:
if self._spVerbosity > 3:
print " reset pooling state for all cells"
self._poolingActivation = numpy.zeros(self._numColumns, dtype="int32")... | This function updates the pooling state of TP cells. A cell will stop
pooling if:
(1) It hasn't received any predicted input in the last self._poolingLife
steps
or
(2) the overall fraction of unpredicted input to the TP is above
_poolingThreshUnpredicted |
10 | def _init_vocab(self, token_generator, add_reserved_tokens=True):
self._id_to_token = {}
non_reserved_start_index = 0
if add_reserved_tokens:
self._id_to_token.update(enumerate(RESERVED_TOKENS))
non_reserved_start_index = len(RESERVED_TOKENS)
self._id_to_token.update(
enumera... | Initialize vocabulary with tokens from token_generator. |
11 | def startTicker(self, reqId, contract, tickType):
ticker = self.tickers.get(id(contract))
if not ticker:
ticker = Ticker(
contract=contract, ticks=[], tickByTicks=[],
domBids=[], domAsks=[], domTicks=[])
self.tickers[id(contract)] = ticker... | Start a tick request that has the reqId associated with the contract.
Return the ticker. |
12 | def MakePmfFromList(t, name=''):
hist = MakeHistFromList(t)
d = hist.GetDict()
pmf = Pmf(d, name)
pmf.Normalize()
return pmf | Makes a PMF from an unsorted sequence of values.
Args:
t: sequence of numbers
name: string name for this PMF
Returns:
Pmf object |
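Assuming the ThinkStats-style API shown in the row (the `Pmf.Prob` accessor is an assumption from that library, not confirmed by this snippet), usage would look like:

```python
pmf = MakePmfFromList([1, 2, 2, 3], name='example')
print(pmf.Prob(2))  # 0.5 -- after Normalize(), probabilities sum to 1
```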
13 | def to_geopandas(raster, **kwargs):
df = to_pandas(raster, **kwargs)
df['geometry'] = df.apply(squares, georaster=raster, axis=1)
df = gp.GeoDataFrame(df, crs=from_string(raster.projection.ExportToProj4()))
return df | Convert GeoRaster to GeoPandas DataFrame, which can be easily exported to other types of files
and used to do other types of operations.
The DataFrame has the geometry (Polygon), row, col, value, x, and y values for each cell
Usage:
df = gr.to_geopandas(raster) |
14 | async def ensure_closed(self):
if self._writer is None:
return
send_data = struct.pack('<i', 1) + int2byte(COMMAND.COM_QUIT)
self._writer.write(send_data)
await self._writer.drain()
self.close() | Send quit command and then close socket connection |
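A sketch of the bytes this builds, assuming the standard MySQL wire format: a 3-byte little-endian payload length plus a 1-byte sequence id form the 4-byte header, followed by the COM_QUIT command byte (0x01):

```python
import struct

COM_QUIT = 0x01  # MySQL COM_QUIT command byte
# struct.pack('<i', 1) yields the header for a 1-byte payload, sequence id 0
packet = struct.pack('<i', 1) + bytes([COM_QUIT])
assert packet == b'\x01\x00\x00\x00\x01'
```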
15 | def add_missing_components(network):
new_trafo = str(network.transformers.index.astype(int).max() + 1)
network.add("Transformer", new_trafo, bus0="16573", bus1="23648",
x=0.135 / (2750 / 2),
r=0.0, tap_ratio=1, s_nom=2750 / 2)
def add_110kv_line(bus... | Add missing transformer at Heizkraftwerk Nord in Munich and missing
transformer in Stuttgart
Parameters
----------
network : :class:`pypsa.Network`
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network`
Overall container of PyPSA |
16 | def get_attr_text(self):
return ' '.join([
        '{}="{}"'.format(key, value)
for key, value in self.attr.items()
]) | Get html attr text to render in template |
17 | def get_lldp_neighbor_detail_output_has_more(self, **kwargs):
config = ET.Element("config")
get_lldp_neighbor_detail = ET.Element("get_lldp_neighbor_detail")
config = get_lldp_neighbor_detail
output = ET.SubElement(get_lldp_neighbor_detail, "output")
has_more = ET.SubEle... | Auto Generated Code |
18 | def _get_directory_stash(self, path):
try:
save_dir = AdjacentTempDirectory(path)
save_dir.create()
except OSError:
save_dir = TempDirectory(kind="uninstall")
save_dir.create()
self._save_dirs[os.path.normcase(path)] = save_dir
r... | Stashes a directory.
Directories are stashed adjacent to their original location if
possible, or else moved/copied into the user's temp dir. |
19 | def get_dict_for_class(self, class_name, state=None, base_name=):
classes = []
klass = class_name
while True:
classes.append(klass)
if klass.__name__ == base_name:
break
klass = klass.__bases__[0]
if state is None:
... | The style dict for a given class and state.
This collects the style attributes from parent classes
and the class of the given object and gives precedence
to values thereof to the children.
The state attribute of the view instance is taken as
the current state if state is None.
... |
20 | def hashes(self):
hashes = set()
if (self.resources is not None):
for resource in self:
if (resource.md5 is not None):
hashes.add('md5')
        if (resource.sha1 is not None):
            hashes.add('sha-1')
        if (resource.sha25... | Return set of hashes used in this resource_list. |
21 | def _get_local_ip(self):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.connect(('8.8.8.8', 80))
return sock.getsockname()[0]
except socket.error:
try:
return socket.gethostbyname(socket.gethostname())
... | Try to determine the local IP address of the machine. |
22 | def ss(inlist):
ss = 0
for item in inlist:
ss = ss + item * item
return ss | Squares each value in the passed list, adds up these squares and
returns the result.
Usage: lss(inlist) |
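A worked check of `ss` above:

```python
assert ss([1, 2, 3]) == 1 + 4 + 9 == 14
# Idiomatic equivalent: sum(x * x for x in inlist)
```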
23 | def voronoi(script, region_num=10, overlap=False):
filter_xml = .join([
,
,
% region_num,
,
,
,
,
,
% str(overlap).lower(),
,
,
,
,
])
util.write_filter(script, filter_xml)
return None | Voronoi Atlas parameterization |
24 | def write_member(self, data):
if isinstance(data, basestring):
self.write(data)
else:
for text in data:
self.write(text)
self.close_member() | Writes the given data as one gzip member.
The data can be a string, an iterator that gives strings or a file-like object. |
25 | def make_gp_funs(cov_func, num_cov_params):
def unpack_kernel_params(params):
mean = params[0]
cov_params = params[2:]
noise_scale = np.exp(params[1]) + 0.0001
return mean, cov_params, noise_scale
def predict(params, x, y, xstar):
mean, cov_params,... | Functions that perform Gaussian process regression.
cov_func has signature (cov_params, x, x') |
26 | async def fetch_neighbourhood(lat: float, long: float) -> Optional[dict]:
lookup_url = f"https://data.police.uk/api/locate-neighbourhood?q={lat},{long}"
async with ClientSession() as session:
try:
async with session.get(lookup_url) as request:
if request.status == 404:... | Gets the neighbourhood from the fetch that is associated with the given postcode.
:return: A neighbourhood object parsed from the fetch.
:raise ApiError: When there was an error connecting to the API. |
27 | def newDocNodeEatName(self, ns, name, content):
if ns is None: ns__o = None
else: ns__o = ns._o
ret = libxml2mod.xmlNewDocNodeEatName(self._o, ns__o, name, content)
if ret is None:raise treeError()
__tmp = xmlNode(_obj=ret)
return __tmp | Creation of a new node element within a document. @ns and
@content are optional (None). NOTE: @content is supposed to
be a piece of XML CDATA, so it allow entities references,
but XML special chars need to be escaped first by using
xmlEncodeEntitiesReentrant(). Use xmlNewDocRawNo... |
28 | def position(self, chromosome, position, exact=False):
return self._clone(
filters=[GenomicFilter(chromosome, position, exact=exact)]) | Shortcut to do a single position filter on genomic datasets. |
29 | def oftype(self, typ):
for key, val in self.items():
if val.type == typ:
yield key | Return a generator of formatters codes of type typ |
30 | def url_join(url, path):
p = six.moves.urllib.parse.urlparse(url)
t = None
if p.path and p.path[-1] == '/':
        if path and path[0] == '/':
            path = path[1:]
        t = ''.join([p.path, path])
    else:
        t = ('' if path and path[0] == '/' else '/').join([p.path, path])
return six.moves.urll... | url version of os.path.join |
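The row's `url_join` targets Python 2 via `six`; here is a rough Python 3 sketch of the same slash-joining idea (`url_join_sketch` is a hypothetical name, and this simplifies the branch logic reconstructed above):

```python
from urllib.parse import urlparse, urlunparse

def url_join_sketch(url, path):
    # Join a URL's path and a new segment, collapsing duplicate slashes
    p = urlparse(url)
    joined = p.path.rstrip('/') + '/' + path.lstrip('/')
    return urlunparse(p._replace(path=joined))

assert url_join_sketch('http://host/api/', '/v1') == 'http://host/api/v1'
assert url_join_sketch('http://host/api', 'v1') == 'http://host/api/v1'
```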
31 | def add_arguments(self, parser):
parser.add_argument(, nargs=1, choices=[],
help=)
return self.add_common_arguments(parser, True) | Adds the unlock command arguments to the parser.
Args:
self (UnlockCommand): the ``UnlockCommand`` instance
parser (argparse.ArgumentParser): the parser to add the arguments to
Returns:
``None`` |
32 | def plan_to_assignment(plan):
assignment = {}
for elem in plan['partitions']:
        assignment[
            (elem['topic'], elem['partition'])
        ] = elem['replicas']
return assignment | Convert the plan to the format used by cluster-topology. |
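If the plan uses the key layout filled in above ('partitions', 'topic', 'partition', and 'replicas' are reconstructed, not confirmed), a round trip would look like:

```python
plan = {'partitions': [
    {'topic': 'logs', 'partition': 0, 'replicas': [1, 2]},
    {'topic': 'logs', 'partition': 1, 'replicas': [2, 3]},
]}
assert plan_to_assignment(plan) == {('logs', 0): [1, 2], ('logs', 1): [2, 3]}
```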
33 | def get_imagery(cls, lat, lon, date=None, dim=None, cloud_score=False):
instance = cls()
filters = {
        'lat': lat,
        'lon': lon,
        'date': date,
        'dim': dim,
        'cloud_score': cloud_score
    }
return instance.get_resource(**filters) | Returns satellite image
Args:
lat: latitude float
lon: longitude float
date: date instance of available date from `get_assets`
dim: width and height of image in degrees as float
cloud_score: boolean to calculate the percentage of the image covered by ... |
34 | def write_artifacts_metadata(self):
if self.conf.artifacts_metadata_file:
logger.info('Writing artifacts metadata to file: %s',
                        self.conf.artifacts_metadata_file)
            with open(self.conf.artifacts_metadata_file, 'w') as fp:
json.dump(self.artifacts_metadata, fp) | Write out a JSON file with all built targets artifact metadata,
if such output file is specified. |
35 | def get_rva_from_offset(self, offset):
s = self.get_section_by_offset(offset)
if not s:
if self.sections:
lowest_rva = min( [ adjust_SectionAlignment( s.VirtualAddress,
self.OPTIONAL_HEADER.SectionAlignment, self.OPTIONAL_HEADER.FileAlig... | Get the RVA corresponding to this file offset. |
36 | def mixin_class(target, cls):
for name, field in getmembers(cls):
Mixin.mixin(target, field, name) | Mix cls content in target. |
37 | def to_json_data(self):
d = collections.OrderedDict((t.get_ref(), t.to_json_data()) for t in self._tables.values())
d["_comment"] = self._comment
d.move_to_end("_comment", last=False)
d["_external_files"] = self._dev_external_files_manager
return d | Returns
-------
A dictionary of serialized data. |
38 | def images_create(self, filename):
suffix = get_filename_suffix(filename, image.VALID_IMGFILE_SUFFIXES)
if not suffix is None:
return self.images.create_object(filename)
suffix = get_filename_suffix(filename, ARCHIVE_SUFFIXES)
... | Create and image file or image group object from the given file. The
type of the created database object is determined by the suffix of the
given file. An ValueError exception is thrown if the file has an unknown
suffix.
Raises ValueError if invalid file is given.
Parameters
... |
39 | def default(restart_cb=None, restart_func=None, close_fds=True):
if _active:
msg = 'lazarus is already active'
        raise RuntimeWarning(msg)
    _python_path = os.getenv('PYTHONPATH')
    if not _python_path:
        msg = 'PYTHONPATH is not set'
        raise RuntimeError(msg)
    if restart_cb and not callable(restart_cb):
        msg = 'restart_cb must be callable'
raise Typ... | Sets up lazarus in default mode.
See the :py:func:`custom` function for a more powerful mode of use.
The default mode of lazarus is to watch all modules rooted at
``PYTHONPATH`` for changes and restart when they take place.
Keyword arguments:
restart_cb -- Callback invoked prior to restartin... |
40 | def next_id(self):
id_str_lst = self._element.xpath('//@id')
used_ids = [int(id_str) for id_str in id_str_lst if id_str.isdigit()]
if not used_ids:
return 1
return max(used_ids) + 1 | Next available positive integer id value in this story XML document.
The value is determined by incrementing the maximum existing id value. Gaps in
the existing id sequence are not filled. The id attribute value is unique in the
document, without regard to the element type it appears on. |
41 | def register_arrays(self, arrays):
if isinstance(arrays, collections.Mapping):
arrays = arrays.itervalues()
for ary in arrays:
self.register_array(**ary) | Register arrays using a list of dictionaries defining the arrays.
The list should itself contain dictionaries. i.e.
.. code-block:: python
D = [{ 'name':'uvw', 'shape':(3,'ntime','nbl'),'dtype':np.float32 },
{ 'name':'lm', 'shape':(2,'nsrc'),'dtype':np.float32 }]
... |
42 | def indirect_font(font, fonts, text):
if font == "rnd-small" or font == "random-small" or font == "rand-small":
font = random.choice(RND_SIZE_DICT["small_list"])
return font
if font == "rnd-medium" or font == "random-medium" or font == "rand-medium":
font = random.choice(RND_SIZE_DI... | Check input font for indirect modes.
:param font: input font
:type font : str
:param fonts: fonts list
:type fonts : list
:param text: input text
:type text:str
:return: font as str |
43 | def _init_metadata(self):
self._choice_ids_metadata = {
: Id(self.my_osid_object_form._authority,
self.my_osid_object_form._namespace,
),
: ,
: ,
: False,
: False,
: False,
... | stub |
44 | def get_covariance(datargs, outargs, vargs, datvar, outvar):
argn = len(vargs)
nobs = 1
for m in xrange(argn):
a = vargs[m]
try:
a = datargs[a]
except (KeyError, TypeError):
a = outargs[a]
... | Get covariance matrix.
:param datargs: data arguments
:param outargs: output arguments
:param vargs: variable arguments
:param datvar: variance of data arguments
:param outvar: variance of output arguments
:return: covariance |
45 | def _has_not_qual(ntd):
for qual in ntd.Qualifier:
if 'not' in qual:
            return True
        if 'NOT' in qual:
            return True
    return False | Return True if the qualifiers contain a 'NOT' |
46 | def vertical_horizontal_filter(data, period):
catch_errors.check_for_period_error(data, period)
vhf = [abs(np.max(data[idx+1-period:idx+1]) -
np.min(data[idx+1-period:idx+1])) /
sum([abs(data[idx+1-period:idx+1][i] - data[idx+1-period:idx+1][i-1]) for i in range(0, len(data[idx+1-perio... | Vertical Horizontal Filter.
Formula:
ABS(pHIGH - pLOW) / SUM(ABS(Pi - Pi-1)) |
47 | def query(self,
where="1=1",
out_fields="*",
timeFilter=None,
geometryFilter=None,
returnGeometry=True,
returnIDsOnly=False,
returnCountOnly=False,
returnFeatureClass=False,
returnDistinctValues... | queries a feature service based on a sql statement
Inputs:
where - the selection sql statement
out_fields - the attribute fields to return
timeFilter - a TimeFilter object where either the start time
or start and end time are defined t... |
48 | def pic_inflow_v2(self):
flu = self.sequences.fluxes.fastaccess
inl = self.sequences.inlets.fastaccess
flu.inflow = inl.q[0]+inl.s[0]+inl.r[0] | Update the inlet link sequences.
Required inlet sequences:
|dam_inlets.Q|
|dam_inlets.S|
|dam_inlets.R|
Calculated flux sequence:
|Inflow|
Basic equation:
:math:`Inflow = Q + S + R` |
49 | def __import_vars(self, env_file):
with open(env_file, "r") as f:
for line in f:
try:
line = line.lstrip()
if line.startswith('export'):
                    line = line.replace('export', '', 1)
                key, val = line.strip().split('=', 1... | Actual importing function. |
50 | def attributive(adjective, gender=MALE, role=SUBJECT, article=None):
w, g, c, a = \
adjective.lower(), gender[:1].lower(), role[:3].lower(), article and article.lower() or None
if w in adjective_attributive:
return adjective_attributive[w]
if a is None \
or a in ("mir", "dir", "ihm"... | For a predicative adjective, returns the attributive form (lowercase).
In German, the attributive is formed with -e, -em, -en, -er or -es,
depending on gender (masculine, feminine, neuter or plural) and role
(nominative, accusative, dative, genitive). |
51 | def append_summary_to_module_docstring(module):
pairs = [(name, getattr(module, name)) for name in module.__all__]
kws = dict(key_header="Name", summary_type="module contents")
module.__doc__ = docstring_with_summary(module.__doc__, pairs, **kws) | Change the ``module.__doc__`` docstring to include a summary table based
on its contents as declared on ``module.__all__``. |
52 | def filter_parts(self, predicate='', exclude=True):
source = self._source_data
if source is None:
raise ExpressionError()
def _parse_partition_predicate(p):
if '=' not in p:
raise ExpressionError()
field_name, field_value = [s.strip() for ... | Filter the data by partition string. A partition string looks like `pt1=1,pt2=2/pt1=2,pt2=1`, where
comma (,) denotes 'and', while (/) denotes 'or'.
:param str|Partition predicate: predicate string of partition filter
:param bool exclude: True if you want to exclude partition fields, otherwise ... |
53 | def transform(self, y):
if self.transform_type == 'log':
        return np.log(y)
    elif self.transform_type == 'exp':
        return np.exp(y)
    elif self.transform_type == 'sqrt':
        return np.sqrt(y)
    elif self.transform_type == 'sin':
        return np.sin(y)
elif self.tr... | Transform features per specified math function.
:param y:
:return: |
54 | def long_fname_format(fmt_str, fmt_dict, hashable_keys=[], max_len=64,
hashlen=16, ABS_MAX_LEN=255, hack27=False):
r
from utool import util_hash
fname = fmt_str.format(**fmt_dict)
if max_len is None:
return fname
if len(fname) > max_len:
fmt_dict_ = fmt... | r"""
DEPRICATE
Formats a string and hashes certain parts if the resulting string becomes
too long. Used for making filenames fit onto disk.
Args:
fmt_str (str): format of fname
fmt_dict (str): dict to format fname with
hashable_keys (list): list of dict keys you are willing to ... |
55 | def commercial_domains():
dus = domain_user_stats()
es = "test@" + pd.Series(dus.index, index=dus.index)
return set(
dus[~is_public_bulk(es) & ~is_university_bulk(es) & (dus > 1)].index) | Return list of commercial email domains, which means:
- domain is not public
- domain is not university
- it is not personal (more than 1 person using this domain)
>>> "google.com" in commercial_domains()
True
>>> "microsoft.com" in commercial_domains()
True
>>> "isri.cs.cmu.edu" in comm... |
56 | def _get_function_wrapper(
self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]
) -> typing.Callable[..., typing.Any]:
raise NotImplementedError() | Here should be constructed and returned real decorator.
:param func: Wrapped function
:type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]
:rtype: typing.Callable |
57 | def render_item(self, all_posts):
index = all_posts.index(self)
if index > 0:
newer_post = all_posts[index - 1]
else:
newer_post = None
if index < len(all_posts) - 1:
older_post = all_posts[index + 1]
else:
older_post ... | Renders the Post as HTML using the template specified in :attr:`html_template_path`.
:param all_posts: An optional :class:`PostCollection` containing all of the posts in the site.
:return: The rendered HTML as a string. |
58 | def removeTab(self, index):
widget = self.widget(index)
try:
self._widgets.remove(widget)
except ValueError:
pass
self.tab_closed.emit(widget)
self._del_code_edit(widget)
QTabWidget.removeTab(self, index)
if widget == self._current... | Removes tab at index ``index``.
This method will emit tab_closed for the removed tab.
:param index: index of the tab to remove. |
59 | def on_exception(wait_gen,
exception,
max_tries=None,
max_time=None,
jitter=full_jitter,
giveup=lambda e: False,
on_success=None,
on_backoff=None,
on_giveup=None,
logg... | Returns decorator for backoff and retry triggered by exception.
Args:
wait_gen: A generator yielding successive wait times in
seconds.
exception: An exception type (or tuple of types) which triggers
backoff.
max_tries: The maximum number of attempts to make before gi... |
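The signature matches the public `backoff` package; if that is the source, typical usage looks like this (the `fetch` wrapper is illustrative):

```python
import backoff
import requests

@backoff.on_exception(backoff.expo,
                      requests.exceptions.RequestException,
                      max_tries=5)
def fetch(url):
    return requests.get(url, timeout=10)
```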
60 | def GetParserFromFilename(self, path):
handler_name = path.split("://")[0]
for parser_cls in itervalues(GRRConfigParser.classes):
if parser_cls.name == handler_name:
return parser_cls
extension = os.path.splitext(path)[1]
if extension in [".yaml", ".yml"]:
return Yaml... | Returns the appropriate parser class from the filename. |
61 | def iat(x, maxlag=None):
if not maxlag:
maxlag = _find_max_lag(x)
acr = [autocorr(x, lag) for lag in range(1, maxlag + 1)]
gammas = [(acr[2 * i] + acr[2 * i + 1]) for i in range(maxlag // 2)]
cut = _cut_time(gammas)
if cut + 1 == len(gammas):
print_("Not enoug... | Calculate the integrated autocorrelation time (IAT), given the trace from a Stochastic. |
62 | def put(self, destination):
target = get_target_path(destination, self.dirname)
valid_paths = (self.dirname, './%s' % self.dirname)
        with tarfile.open(self.archive_path, 'r') as tf:
members = []
for tarinfo in tf:
pathsplit = os.path.normp... | Copy the referenced directory to this path
Note:
This ignores anything not in the desired directory, given by ``self.dirname``.
Args:
destination (str): path to put this directory (which must NOT already exist)
References:
https://stackoverflow.com/a/826108... |
63 | def read(self, stream):
def read_it(stream):
bytes = stream.read()
transportIn = TMemoryBuffer(bytes)
protocolIn = TBinaryProtocol.TBinaryProtocol(transportIn)
topology = StormTopology()
topology.read(protocolIn)
return topology
... | Reads the topology from a stream or file. |
64 | def get_version(self, diff_to_increase_ratio):
diffs = self.get_diff_amounts()
version = Version()
for diff in diffs:
version.increase_by_changes(diff, diff_to_increase_ratio)
return version | Gets version
:param diff_to_increase_ratio: Ratio to convert number of changes into
:return: Version of this code, based on commits diffs |
65 | def reparentUnions(self):
removals = []
for u in self.unions:
parts = u.name.split("::")
if len(parts) >= 2:
parent_name = "::".join(p for p in parts[:-1])
reparented = False
... | Helper method for :func:`~exhale.graph.ExhaleRoot.reparentAll`. Namespaces and
classes should have the unions defined in them to be in the child list of itself
rather than floating around. Union nodes that are reparented (e.g. a union
defined in a class) will be removed from the list ``self.un... |
66 | def get(self, address):
loopback = super(LoopbackCollection, self).get(address=address)
if loopback:
return loopback
raise InterfaceNotFound() | Get a loopback address by its address. Find all loopback addresses
by iterating at either the node level or the engine::
loopback = engine.loopback_interface.get('127.0.0.10')
:param str address: ip address of loopback
:raises InterfaceNotFound: invalid interface s... |
67 | def _ImportHookBySuffix(
name, globals=None, locals=None, fromlist=None, level=None):
_IncrementNestLevel()
if level is None:
level = 0 if six.PY3 else -1
try:
module = _real_import(name, globals, locals, fromlist, level)
finally:
... | Callback when an import statement is executed by the Python interpreter.
Argument names have to exactly match those of __import__. Otherwise calls
to __import__ that use keyword syntax will fail: __import__('a', fromlist=[]). |
68 | def modify_column_if_table_exists(self,
tablename: str,
fieldname: str,
newdef: str) -> Optional[int]:
if not self.table_exists(tablename):
return None
sql = "ALTER TABL... | Alters a column's definition without renaming it. |
69 | def center(self):
if np.all(np.isfinite(self.xyz)):
return np.mean(self.xyz, axis=0) | The cartesian center of the Compound based on its Particles.
Returns
-------
np.ndarray, shape=(3,), dtype=float
The cartesian center of the Compound based on its Particles |
70 | def setup_config(self, cfg=None):
_opts, _args = optparse.OptionParser.parse_args(self)
configs = self.find_existing_configs(_opts.support_unit)
if configs and cfg not in configs:
cfg = configs[0]
return config.master_config(self.get_config_file_path(cfg)) | Open suitable config file.
:return: |
71 | def find_sanitiser_nodes(
sanitiser,
sanitisers_in_file
):
for sanitiser_tuple in sanitisers_in_file:
if sanitiser == sanitiser_tuple.trigger_word:
yield sanitiser_tuple.cfg_node | Find nodes containing a particular sanitiser.
Args:
sanitiser(string): sanitiser to look for.
sanitisers_in_file(list[Node]): list of CFG nodes with the sanitiser.
Returns:
Iterable of sanitiser nodes. |
72 | def hypercube_edges(dims, use_map=False):
edges = []
nodes = np.arange(np.product(dims)).reshape(dims)
for i,d in enumerate(dims):
for j in range(d-1):
for n1, n2 in zip(np.take(nodes, [j], axis=i).flatten(), np.take(nodes,[j+1], axis=i).flatten()):
edges.append((n1,... | Create edge lists for an arbitrary hypercube. TODO: this is probably not the fastest way. |
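A worked example for the smallest non-trivial case, a 2x2 grid (node numbering follows the row-major `np.arange(...).reshape(dims)` in the snippet):

```python
# 2x2 grid, nodes numbered row-major:
#   0 - 1
#   |   |
#   2 - 3
edges = hypercube_edges([2, 2])
assert sorted(edges) == [(0, 1), (0, 2), (1, 3), (2, 3)]
```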
73 | def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context | Add filter form to the context.
TODO: Currently we construct the filter form object twice - in
get_queryset and here, in get_context_data. Will need to figure out a
good way to eliminate extra initialization. |
74 | async def volume(self, ctx, volume: int):
if ctx.voice_client is None:
return await ctx.send("Not connected to a voice channel.")
ctx.voice_client.source.volume = volume / 100
await ctx.send("Changed volume to {}%".format(volume)) | Changes the player's volume |
75 | def process_doc(text):
document = docutils.core.publish_doctree(text)
visitor = RefVisitor(document)
document.walk(visitor)
return visitor.kwd, visitor.values | The :ref: role is supported by Sphinx but not by plain docutils |
76 | def discharge(self):
rv = np.zeros(self.aq[0].naq)
Qls = self.parameters[:, 0] * self.dischargeinf()
Qls.shape = (self.nls, self.nlayers, self.order + 1)
Qls = np.sum(Qls, 2)
for i, q in enumerate(Qls):
rv[self.layers[i]] += q
re... | Discharge of the element in each layer |
77 | async def message_throttled(self, message: types.Message, throttled: Throttled):
handler = current_handler.get()
dispatcher = Dispatcher.get_current()
if handler:
key = getattr(handler, 'throttling_key', f"{self.prefix}_{handler.__name__}")
else:
key = f"{self.prefix}_me... | Notify user only on first exceed and notify about unlocking only on last exceed
:param message:
:param throttled: |
78 | def generate_private_investment(asset_manager_id=None, asset_id=None, client_id=None):
attributes = generate_common(asset_manager_id=asset_manager_id, asset_id=asset_id)
private_investment = PrivateInvestment(client_id=client_id or random_string(5),
asset_issuer_i... | currency, display_name |
79 | def append(self, parent, content):
appender = self.default
for matcher, candidate_appender in self.appenders:
if matcher == content.value:
appender = candidate_appender
break
appender.append(parent, content) | Select an appender and append the content to parent.
@param parent: A parent node.
@type parent: L{Element}
@param content: The content to append.
@type content: L{Content} |
80 | def in_simo_and_inner(self):
return len(self.successor) > 1 and self.successor[0] is not None and not self.successor[0].in_or_out and \
len(self.precedence) == 1 and self.precedence[0] is not None and not self.successor[0].in_or_out | Test if a node is simo: single input and multiple output |
81 | def _stream_blob(self, key, fileobj, progress_callback):
file_size = None
start_range = 0
chunk_size = self.conn.MAX_CHUNK_GET_SIZE
end_range = chunk_size - 1
while True:
try:
blob = self.conn._get_blob(self.container_name, ke... | Streams contents of given key to given fileobj. Data is read sequentially in chunks
without any seeks. This requires duplicating some functionality of the Azure SDK, which only
allows reading entire blob into memory at once or returning data from random offsets |
82 | def dump_pk(obj, abspath,
pk_protocol=pk_protocol, replace=False, compress=False,
enable_verbose=True):
abspath = str(abspath)
msg = Messenger(enable_verbose=enable_verbose)
if compress:
root, ext = os.path.splitext(abspath)
if ext != ".gz":
if ... | Dump Picklable Python Object to file.
Provides multiple choice to customize the behavior.
:param obj: Picklable Python Object.
:param abspath: ``save as`` path, file extension has to be ``.pickle`` or
``.gz`` (for compressed Pickle).
:type abspath: string
:param pk_protocol: (default your... |
83 | def publish_predictions_to_core(self):
status = FAILED
msg = "not started"
try:
msg = "generating request"
log.info(msg)
publish_req = generate_ai_request(
predict_rows=self.df.fillna(
ANTINEX_MISSI... | publish_predictions_to_core |
84 | def get_organizations(self, page=None):
opts = {}
if page:
opts['page'] = page
return self.api_call(ENDPOINTS[][], **opts) | Get organizations |
85 | def create_salt(length: int=128) -> bytes:
return b"".join(bytes([SystemRandom().randint(0, 255)]) for _ in range(length)) | Create a new salt
:param int length: How many bytes should the salt be long?
:return: The salt
:rtype: bytes |
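On modern Python the same result comes from a stdlib one-liner (an alternative to the row's per-byte loop, not its implementation):

```python
import os
salt = os.urandom(128)  # cryptographically secure 128-byte salt in one call
```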
86 | def delete_biggest(self):
logger.info(
"Deleting all mails sharing the biggest size of {} bytes..."
"".format(self.biggest_size))
candidates = [
mail for mail in self.pool
if mail.size == self.biggest_size]
if len(candidates) == s... | Delete all the biggest duplicates.
Keeps all mail of the duplicate set but those sharing the biggest
size. |
87 | def check_async(paths, options, rootdir=None):
LOGGER.info()
path_queue = Queue.Queue()
result_queue = Queue.Queue()
for num in range(CPU_COUNT):
worker = Worker(path_queue, result_queue)
worker.setDaemon(True)
LOGGER.info(, (num + 1))
worker.start()
for path i... | Check given paths asynchronously.
:return list: list of errors |
88 | def begin_batch(self):
self.is_batch = True
self.batch_table = ''
        self.batch_partition_key = ''
self.batch_row_keys = []
self.batch_requests = [] | Starts the batch operation. Intializes the batch variables
is_batch:
batch operation flag.
batch_table:
the table name of the batch operation
batch_partition_key:
the PartitionKey of the batch requests.
batch_row_keys:
the RowKey list of a... |
89 | def subdivide_to_size(vertices,
faces,
max_edge,
max_iter=10):
done_face = []
done_vert = []
current_faces = np.array(faces,
dtype=np.int64,
copy=True)
current_vert... | Subdivide a mesh until every edge is shorter than a
specified length.
Will return a triangle soup, not a nicely structured mesh.
Parameters
------------
vertices : (n, 3) float
Vertices in space
faces : (m, 3) int
Indices of vertices which make up triangles
max_edge : float
... |
90 | def get_context_arguments(self):
cargs = {}
for context in self.__context_stack:
cargs.update(context.context_arguments)
return cargs | Return a dictionary containing the current context arguments. |
91 | def system(cmd, data=None):
import subprocess
s = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
out, err = s.communicate(data)
return out.decode() | pipes the output of a program |
92 | def Write2000256List(self, arr):
for item in arr:
ba = bytearray(binascii.unhexlify(item))
ba.reverse()
self.WriteBytes(ba) | Write an array of 64 byte items to the stream.
Args:
arr (list): a list of 2000 items of 64 bytes in size. |
93 | def midi2f(params, midi=69):
midi = create_buffer(params, midi)
output = 2**((midi - 69)/12)*440
return output | Convert a midi value to a frequency.
Midi value 69 corresponds to A4 (440Hz). Changing the midi value by 1 corresponds to
one semitone
:param params: buffer parameters, controls length of signal created
:param midi: midi value
:return: array of resulting frequency |
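Worked values for the formula `2 ** ((midi - 69) / 12) * 440`:

```python
for midi in (60, 69, 81):
    freq = 2 ** ((midi - 69) / 12) * 440
    print(midi, round(freq, 2))  # 60 -> 261.63 (C4), 69 -> 440.0 (A4), 81 -> 880.0
```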
94 | def _env_runner(base_env, extra_batch_callback, policies, policy_mapping_fn,
unroll_length, horizon, preprocessors, obs_filters,
clip_rewards, clip_actions, pack, callbacks, tf_sess,
perf_stats, soft_horizon):
try:
if not horizon:
horizon = (... | This implements the common experience collection logic.
Args:
base_env (BaseEnv): env implementing BaseEnv.
extra_batch_callback (fn): function to send extra batch data to.
policies (dict): Map of policy ids to PolicyGraph instances.
policy_mapping_fn (func): Function that maps agen... |
95 | def _fetch(self, url, params):
if not self.from_archive:
self.sleep_for_rate_limit()
headers = {: + self.api_key}
r = self.fetch(url, payload=params, headers=headers)
if not self.from_archive:
self.update_rate_limit(r)
return r.text | Fetch a resource.
Method to fetch and to iterate over the contents of a
type of resource. The method returns a generator of
pages for that resource and parameters.
:param url: the endpoint of the API
:param params: parameters to filter
:returns: the text of the respons... |
96 | def run_flow(flow, storage, flags=None, http=None):
if flags is None:
flags = argparser.parse_args()
logging.getLogger().setLevel(getattr(logging, flags.logging_level))
if not flags.noauth_local_webserver:
success = False
port_number = 0
for port in flags.auth_host_port:... | Core code for a command-line application.
The ``run()`` function is called from your application and runs
through all the steps to obtain credentials. It takes a ``Flow``
argument and attempts to open an authorization server page in the
user's default web browser. The server asks the user to grant your... |
97 | def upload_and_confirm(self, incoming, **kwargs):
response_dict = self.upload(incoming)
if in response_dict:
log.warning()
return response_dict
if isinstance(incoming, Info):
kwargs.setdefault(, incoming.thumb_nail_left)
kwargs.setdefault... | Upload the file to okcupid and confirm, among other things, its
thumbnail position.
:param incoming: A filepath string, :class:`.Info` object or
a file like object to upload to okcupid.com.
If an info object is provided, its thumbnail
... |
98 | def get_dyndns_records(login, password):
params = dict(action=, sha=get_auth_key(login, password))
response = requests.get(, params=params, timeout=timeout)
raw_records = (line.split() for line in response.content.split())
try:
records = frozenset(DnsRecord(*record) for record in raw_recor... | Gets the set of dynamic DNS records associated with this account |
99 | def _repr_html_(self):
out="<table class=>\n"
if not(self.name()[:4]=="Col_"):
out+="<tr>"
out+="<th><b>"+self.name()+"</b></th>"
out+="</tr>"
cropped=False
rowcount=0
colkeywords=self.getkeywords()
for row in self:
... | Give a nice representation of columns in notebooks. |