content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def AllenAtlas(res_um=25, par=None):
"""
Instantiates an atlas.BrainAtlas corresponding to the Allen CCF at the given resolution
using the IBL Bregma and coordinate system
:param res_um: 25 or 50 um
:return: atlas.BrainAtlas
"""
if par is None:
# Bregma indices for the 10um Allen Bra... | 13,900 |
def mplot3d(f, var1, var2, show=True):
"""
Plot a 3d function using matplotlib/Tk.
"""
import warnings
warnings.filterwarnings("ignore", "Could not match \S")
p = import_module('pylab')
# Try newer version first
p3 = import_module('mpl_toolkits.mplot3d',
__import__kwargs={'from... | 13,901 |
def nsd_delete1(ctx, name, force):
    """Delete a NSD/NSpkg.

    NAME: name or ID of the NSD/NSpkg to be deleted
    """
    # Delegates straight to the shared deletion helper.
    nsd_delete(ctx, name, force)
def get_devices_properties(device_expr,properties,hosts=[],port=10000):
"""
Usage:
get_devices_properties('*alarms*',props,
hosts=[get_bl_host(i) for i in bls])
props must be an string as passed to Database.get_device_property();
regexp are not enabled!
get_matching_device_prop... | 13,903 |
def benchmark_classifier(cls, y_test, labels, scoring):
"""
Use 10-fold cross validation to benchmark the performance of the provided classifier.
Parameters
----------
cls : estimator object implementing `fit`
y_test : array
labels : list (of strings)
List containing the labels tha... | 13,904 |
def atexit_shutdown_grace_period(grace_period=-1.0):
"""Return and optionally set the default worker cache shutdown grace period.
This only affects the `atexit` behavior of the default context corresponding to
:func:`trio_parallel.run_sync`. Existing and future `WorkerContext` instances
are unaffected.... | 13,905 |
def combine_files(root, pattern=None):
"""Combine all files in root path directory
Parameters:
root (str) : file path to directory of files
pattern (str) : optional file pattern to search for in directory
Returns:
combined files
"""
if pattern is not None:
files = [... | 13,906 |
def get_point(points, cmp, axis):
""" Get a point based on values of either x or y axys.
:cmp: Integer less than or greater than 0, representing respectively
< and > singhs.
:returns: the index of the point matching the constraints
"""
index = 0
for i in range(len(points)):
if cmp <... | 13,907 |
def benchmark_parser_header_16(nb_headers, nb_fields, do_checksum=False):
"""
This method generate the P4 program to benchmark the P4 parser
:param nb_headers: the number of generic headers included in the program
:type nb_headers: int
:param nb_fields: the number of fields (16 bits) in each header... | 13,908 |
def check_result(reference, result_list, enable_assertion):
"""Checks result, warns when latency is abnormal.
Args:
reference: { environment : reference_time}, environment is a string tuple
while reference_time is a float number.
result_list: a list of tuple.
enable_assertion: bool, throw asserti... | 13,909 |
def create_jobs_list(chunks, outdir, *filters):
# TO DO
# Figure out the packing/unpacking
"""
Create a list of dictionaries that hold information for the given
chunks
Arguments:
chunks: list: A list of lists. Each nested list contains the
filepaths to be processed
outdir: Pat... | 13,910 |
def per_symbol_to_per_seq_probs(per_symbol_probs, tgt_out_idx):
""" Gather per-symbol probabilities into per-seq probabilities """
# per_symbol_probs shape: batch_size, seq_len, candidate_size
# tgt_out_idx shape: batch_size, seq_len
# output shape: batch_size, 1
return torch.prod(
torch.gat... | 13,911 |
def append_open_buildinfo(buildinfo_path, files=open_buildinfo_files):
"""Append BUILD-INFO.txt with open section for open_buildinfo_files"""
if os.path.exists(os.path.join(buildinfo_path, BUILDINFO)):
try:
bifile = open(os.path.join(buildinfo_path, BUILDINFO), "a")
try:
... | 13,912 |
def solve_game(payoffs):
""" given payoff matrix for a zero-sum normal-form game,
return first mixed equilibrium (may be multiple)
returns a tuple of numpy arrays """
# .vertex_enumeration()
# .lemke_howson(initial_dropped_label=0) - does not return *all* equilibrium
game = nash.Game(payoffs)
... | 13,913 |
def emit_settings_changed_event(user, db_table, changed_fields: Dict[str, Tuple[Any, Any]]):
"""Emits an event for a change in a setting.
Args:
user (User): the user that this setting is associated with.
db_table (str): the name of the table that we're modifying.
changed_fields: all cha... | 13,914 |
def zone_features(df, zfeatures, aufeatures):
"""Create zone features from the data
Args:
df (DataFrame): Input dataframe
zfeatures (list): List of zone median features
aufeatures (list): List of zone autocorr features
Return: 2 dataframes
"""
# Medians from the last 1,... | 13,915 |
def get_active_validator_indices(validators: List[ValidatorRecord]) -> List[int]:
    """
    Gets indices of active validators from ``validators``.

    :param validators: sequence of validator records to scan.
    :return: indices ``i`` for which ``is_active_validator(validators[i])``
        is truthy, in order.
    """
    # Fix: the annotation was the list literal ``[ValidatorRecord]``,
    # which is not valid typing syntax; ``List[ValidatorRecord]`` is.
    return [i for i, v in enumerate(validators) if is_active_validator(v)]
def test_tag_links_render_on_bookmarks_list(client, db_setup):
"""When a user views their bookmark list - links to the tags should be
incldued in the respose, but it should NOT include links to tags
associated with other user's bookmarks.
"""
user = db_setup.get("homer")
login = client.login(... | 13,917 |
def parse_decodes(sentences, predictions, lengths, label_vocab):
"""Parse the padding result
Args:
sentences (list): the tagging sentences.
predictions (list): the prediction tags.
lengths (list): the valid length of each sentence.
label_vocab (dict): the label vocab.
Retur... | 13,918 |
def get_weekday(start_date, end_date, weekday_nums, repeat=None):
"""
获取一段时间范围内每个周天对应的日期
:param start_date:
:param end_date:
:param weekday_nums: list, 星期对应数字 0 ~ 6
:param repeat:
:return:
"""
sdate = datetime.datetime.strptime(start_date, date_pattern1)
edate = datetime.datetim... | 13,919 |
def context_processor(target):
"""
Decorator that allows context processors with parameters to be assigned
(and executed properly) in a RequestContext
Example::
return render_to_response(
template_name,
context_instance=RequestContext(
request,
processors=[
... | 13,920 |
def write_checkgroups(groups, path):
""" Write the current checkgroups file.
Arguments: groups (a dictionary representing a checkgroups)
path (path of the checkgroups file)
No return value
"""
keys = groups.keys()
keys.sort()
checkgroups_file = file(path, 'wb')
... | 13,921 |
def test_put_object_from_filelike(repository, generate_directory):
"""Test the ``Repository.put_object_from_filelike`` method."""
directory = generate_directory({'file_a': None, 'relative': {'file_b': None}})
with open(directory / 'file_a', 'rb') as handle:
repository.put_object_from_filelike(handl... | 13,922 |
def data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete(uuid, local_id): # noqa: E501
"""data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete
removes... | 13,923 |
def BuildWagonCommand(state):
"""Build wagon command to build MCEP model for the given state.
Example command -
$ESTDIR/bin/wagon
-track_feats 1-50
-vertex_output mean
-desc festival/clunits/mcep.desc
-data 'festival/feats/w_3.feats'
-test 'festival/feats/w_3.feats'
-balance 0
-track 'festival/dist... | 13,924 |
def decode_messages(fit_bytes: bytes) -> typing.List[typing.Dict]:
"""Decode serialized messages.
Arguments:
fit_bytes: Encoded messages
Returns:
Decoded messages
"""
messages = []
for line in fit_bytes.splitlines():
payload = json.loads(line)
messages.append(s... | 13,925 |
def main() :
"""
Main Function: to compute performance of Two Models with different non-linear functions for different losses
Namely, this will generate 4 graphs, for losses MSE and MAE, for each non-linear type Relu and Tanh
:return: nothing but four graphs and write the number of errors in the terminal
"""
###... | 13,926 |
def test_parse_no_pint_objects_in_df():
"""Test that there are no Pint quantities in dataframes created by parser."""
input_file = get_test_data('metar_20190701_1200.txt', mode='rt')
metar_str = ('KSLK 011151Z AUTO 21005KT 1/4SM FG VV002 14/13 A1013 RMK AO2 SLP151 70043 '
'T01390133 10139 2... | 13,927 |
def read_DELETE(msg, hosts):
"""Parse the DELETE request and send data to the response generator function
Args:
msg (String): The request message to parse
hosts (List): The array of hosts
Returns:
List: An array of information about the request, including status code,
... | 13,928 |
def iresnet101(pretrained=False, progress=True, **kwargs):
"""
Constructs the IResNet-101 model trained on Glint360K(https://github.com/deepinsight/insightface/tree/master/recognition/partial_fc#4-download).
.. note::
The required input size of the model is 112x112.
Args:
pretrained (b... | 13,929 |
def get_full_jwt(user: User) -> Dict:
    """
    Build the complete JWT response for ``user``: a fresh access token
    plus a refresh token.
    """
    access = create_access_token(identity=user, fresh=True)
    refresh = create_refresh_token(identity=user)
    return {
        'access_token': access,
        'refresh_token': refresh
    }
def delete():
    """ (Local command) Deletes the current note.
    """
    path = vim.current.buffer.name
    # Nothing to do when the buffer has no backing file on disk.
    if not exists(path):
        return
    answer = vim.eval('input("really delete? (y/n): ")')
    if answer not in ("y", "Y"):
        return
    remove(path)
    vim.command("bd!")
    vim.command("redraw!")
def GridSearch_Prophet(prophet_grid, metric='mape'):
"""
GridSearch tool to determine the optimal parameters for prophet
Args:
- prophet_grid: List of parameters. Enter it as list(ParameterGrid(prophet_grid)
- metric: String. Not used yet. May be used to change the metric used to sort
... | 13,932 |
def test_reference_links_548():
"""
Test case 548: Unicode case fold is used:
"""
# Arrange
source_markdown = """[ẞ]
[SS]: /url"""
expected_tokens = [
"[para(1,1):]",
"[link(1,1):shortcut:/url:::::ẞ:::::]",
"[text(1,2):ẞ:]",
"[end-link::]",
"[end-para::... | 13,933 |
def _check_max_features(importances, max_features):
"""Interpret the max_features value"""
n_features = len(importances)
if max_features is None:
max_features = n_features
elif isinstance(max_features, int):
max_features = min(n_features, max_features)
elif isinstance(max_feature... | 13,934 |
def map_blocks(func, data):
"""Curried version of Dask's map_blocks
Args:
func: the function to map
data: a Dask array
Returns:
a new Dask array
>>> f = map_blocks(lambda x: x + 1)
>>> f(da.arange(4, chunks=(2,)))
dask.array<lambda, shape=(4,), dtype=int64, chunksize=(2,)>
... | 13,935 |
def download_and_extract(
package: str,
directory: Path,
version: Optional[str] = None,
remove_after: bool = False
) -> Path:
"""Modified to allow avoiding removing files after.
Parameters
----------
package
directory
version
remove_after
Returns
---... | 13,936 |
def test_case_3_b(setup_leveldb, setup_sqlite):
""" Test case 3.b:
3. Incoming record contains 2+ OCNs that resolve to two Concordance Table primary record
b. Record OCNs + OCLC OCNs match one CID
Test datasets:
Zephir cluster: one match
CID: 008648991
OCNs: 4912741,... | 13,937 |
def dump_config(exp_dir: str, config: Union[ConfigDict, FrozenConfigDict]) -> None:
"""Dump a config to disk.
Args:
exp_dir (str): Path to the experiment directory.
config (Union[ConfigDict, FrozenConfigDict]): The config to dump.
"""
if not os.path.exists(exp_dir):
os.makedirs(... | 13,938 |
def premises_to_syllogism(premises):
    """Encode two categorical premises as a syllogism mood+figure code.

    >>> premises_to_syllogism(["Aab", "Ebc"])
    'AE1'
    """
    major, minor = premises
    # The figure is determined by the order of the terms in both premises.
    term_order = major[1:] + minor[1:]
    figure = {"abbc": "1", "bacb": "2", "abcb": "3", "babc": "4"}[term_order]
    return major[0] + minor[0] + figure
def fix_fits_keywords(header):
"""
Update header keyword to change '-' by '_' as columns with '-' are not
allowed on SQL
"""
new_header = {}
for key in header.keys():
new_key = key.replace('-', '_')
new_header[new_key] = header[key]
# Temporary fix - needs to be removed
... | 13,940 |
def request_password(email: str, mailer: Mailer, _tn: Translator):
"""
Create new hashed password and send mail..
:param email: Mail-address which should be queried
:param mailer: pyramid Mailer
:param _tn: Translator
:return: dict with info about mailing
"""
db_user = DBDiscussionSessi... | 13,941 |
def is_sum_lucky(x, y):
"""This returns a string describing whether or not the sum of input is lucky
This function first makes sure the inputs are valid and then calculates the
sum. Then, it will determine a message to return based on whether or not
that sum should be considered "lucky"
"""
if x... | 13,942 |
def unsync_function(func, *args, **kwargs):
    """Runs an async function in a standard blocking way and returns output"""
    coroutine = func(*args, **kwargs)
    return asyncio.run(coroutine)
def zip_results(name: str, recipes: Iterable[Recipe], cache=CacheType.Auto) \
-> Recipe[Union[List[Tuple[Any, ...]], Dict[Any, Tuple[Any, ...]]]]:
"""
Create a Recipe that zips the outputs from a number of recipes into elements, similar to Python's built-in zip().
Notably, dictionaries are handled a... | 13,944 |
def get_muscle_reference_dictionary():
"""
The
@article{bashkatov2011optical,
title={Optical properties of skin, subcutaneous, and muscle tissues: a review},
author={Bashkatov, Alexey N and Genina, Elina A and Tuchin, Valery V},
journal={Journal of Innovative Op... | 13,945 |
def flake8_entrypoint(physical_line: str) -> Optional[Tuple[int, str]]:
"""Flake8 plugin entrypoint that operates on physical lines."""
match = RX_TODO_OR_ELSE.search(physical_line)
if match:
by = match.group(2)
pact = match.group(3).strip()
try:
TodoOrElse().by(pact, by=... | 13,946 |
def test_to_config_dict_given_non_configurable() -> None:
"""
Test that ``to_config_dict`` errors when passed an instance that does not
descend from configurable.
"""
class SomeOtherClassType (object):
pass
inst = SomeOtherClassType()
with pytest.raises(ValueError,
... | 13,947 |
def smartquotes(text):
"""
Runs text through pandoc for smartquote correction.
This script accepts a paragraph of input and outputs typographically correct
text using pandoc. Note line breaks are not retained.
"""
command = shlex.split('pandoc --smart -t plain')
com = Popen(command, shell=... | 13,948 |
def get_nearest_stations_xy(x, y, variable, n=1, stations=None, ignore=None):
"""find the KNMI stations that measure 'variable' closest to the
x, y coordinates
Parameters
----------
x : int or float
x coordinate in RD
y : int or float
x coordinate in RD
variable : str
... | 13,949 |
def parseStdInput():
    """Obtain a graph by parsing the standard input
    as per the format specified in the PACE Challange.

    NOTE(review): currently a stub — it never reads stdin and always
    returns a fixed 4-cycle on vertices 1..4; confirm before relying on it.
    """
    graph = nx.Graph()
    graph.add_edges_from([(1, 2), (2, 3), (3, 4), (4, 1)])
    return graph
def train_worker(dpw,
reverb_client,
config):
"""Train worker loop.
Collects rollouts and writes sampled discounted state visitation to reverb
indefinitely (xmanager should kill this job when the learner job ends).
Args:
dpw: The discretized puddle world to use.
rever... | 13,951 |
def strfdelta(tdelta, fmt):
""" Get a string from a timedelta.
"""
f, d = Formatter(), {}
l = {"D": 86400, "H": 3600, "M": 60, "S": 1}
k = list(map(lambda x: x[1], list(f.parse(fmt))))
rem = int(tdelta.total_seconds())
for i in ("D", "H", "M", "S"):
if i in k and i in l.keys():
... | 13,952 |
def get_file_list_from_dir(parent_dir: Path, file_mask: str = "*") -> list:
"""
Recursively gets a list of files in a Path directory with the specified name mask
and return absolute string paths for files
"""
get_logger(__name__).debug("Iterating for files in '{}'".format(parent_dir.absolute()))
... | 13,953 |
def imputation_Y(X, model):
"""Perform imputation. Don't normalize for depth.
Args:
X: feature matrix from h5.
model: a trained scBasset model.
Returns:
array: a peak*cell imputed accessibility matrix. Sequencing depth
isn't corr... | 13,954 |
def format_percent(x, _pos=None):
    """Format a fraction ``x`` as a LaTeX percent label.

    Usage: plt.gca().yaxis.set_major_formatter(format_percent)
    """
    pct = 100 * x
    # Show one decimal only when the value is visibly non-integral.
    needs_decimal = abs(pct - round(pct)) > 0.05
    template = r"${:.1f}\%$" if needs_decimal else r"${:.0f}\%$"
    return template.format(pct)
def is_row_and_col_balanced(T1, T2):
"""
Partial latin squares T1 and T2 are balanced if the symbols
appearing in row r of T1 are the same as the symbols appearing in
row r of T2, for each r, and if the same condition holds on
columns.
EXAMPLES::
sage: from sage.combinat.matrices.latin... | 13,956 |
def submit(ds, entry_name, molecule, index):
"""
Submit an optimization job to a QCArchive server.
Parameters
----------
ds : qcportal.collections.OptimizationDataset
The QCArchive OptimizationDataset object that this calculation
belongs to
entry_name : str
The base entr... | 13,957 |
def db():
    """Database queries (including initialization)."""
    # Intentionally empty: serves as a named placeholder / command group.
def list_isos(apiclient, **kwargs):
    """Lists all available ISO files.

    :param apiclient: API client used to issue the ``listIsos`` request.
    :param kwargs: attributes copied verbatim onto the command object.
    :return: the response of ``apiclient.listIsos``.
    """
    cmd = listIsos.listIsosCmd()
    # Fix: a list comprehension run purely for its setattr side effects
    # allocates a throwaway list and obscures intent — use a plain loop.
    for key, value in kwargs.items():
        setattr(cmd, key, value)
    return apiclient.listIsos(cmd)
async def edit_message_live_location(
token: str = TOKEN_VALIDATION,
latitude: float = Query(..., description='Latitude of new location'),
longitude: float = Query(..., description='Longitude of new location'),
chat_id: Optional[Union[int, str]] = Query(None, description='Required if inline_message_id i... | 13,960 |
def process_rollout(rollout, gamma, lambda_=1.0):
"""
given a rollout, compute its returns and the advantage
"""
batch_si = np.asarray(rollout.states)
batch_a = np.asarray(rollout.actions)
rewards = np.asarray(rollout.rewards)
action_reward = np.concatenate((batch_a,rewards[:,np.newaxis]), a... | 13,961 |
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add sensors for passed config_entry in HA."""
coordinator: IotawattUpdater = hass.data[DOMAIN][config_entry.entry_id]
created = set()
@callback
def _create_entity(key: str) -> IotaWattSensor:
"""Create a sensor entity."... | 13,962 |
def calculate_cost(cost, working_days_flag, month, nr_of_passes):
"""Calculate the monthly tolls cost"""
if working_days_flag:
passes = working_days(month) * nr_of_passes
else:
now = datetime.datetime.now()
passes = calendar.monthrange(now.year, month)[1] * nr_of_passes
total_cos... | 13,963 |
def e_dl() -> str:
"""Fetch size of archives to be downloaded for next system update."""
size = 'Calculating...'
with open(file=TMERGE_LOGFILE, mode='r', encoding='utf-8') as log_file:
for line in list(log_file)[::-1]:
reqex = search(r'(Size of downloads:.)([0-9,]*\s[KMG]iB)', line)
... | 13,964 |
def get_model_fn():
"""Returns the model definition."""
def model_fn(features, labels, mode, params):
"""Returns the model function."""
feature = features['feature']
print(feature)
labels = labels['label']
one_hot_labels = model_utils.get_label(
labels,
params,
FLAGS.src... | 13,965 |
def build_node(idx, node_type):
    """ Build node list
    :idx: a value to id mapping dict
    :node_type: a string describe the node type
    :returns: a list of records of the nodes extracted from the mapping
    """
    label_fields = {':LABEL': node_type}
    return rekey(idx, 'value', 'id:ID', label_fields)
def _getTypeFromExtension(path, mode='write'):
"""
Parameters
----------
path : str
path from with to pull the extension from - note that it may NOT be
ONLY the extension - ie, "obj" and ".obj", will not work, but
"foo.obj" will
mode : {'write', 'read'}
the type is ba... | 13,967 |
def residual_block(filters, repetitions,kernel_size=(3,3),strides=(2,2), is_first_layer=False):
"""Builds a residual block with repeating bottleneck blocks.
"""
def f(input):
for i in range(repetitions):
init_strides = (1, 1)
if i == 0 and not is_first_layer:
... | 13,968 |
def vpg_omega(X, Y, Gamma=1, sigma=1, polarIn=False):
    """
    Vorticity distribution for 2D Gaussian vortex patch

    When ``polarIn`` is true, ``X`` is taken to be the radius directly
    and ``Y`` is ignored; otherwise (X, Y) are Cartesian coordinates.
    """
    r = X if polarIn else np.sqrt(X ** 2 + Y ** 2)
    peak = Gamma / (np.pi * sigma)
    return peak * np.exp(-r ** 2 / sigma ** 2)
async def _run_filter_tests(hass, tests, process_queue, mock_batch):
"""Run a series of filter tests on azure event hub."""
for test in tests:
hass.states.async_set(test.id, STATE_ON)
await hass.async_block_till_done()
await process_queue(None)
if test.should_pass:
m... | 13,970 |
def test_example_file_passing_using_fixture(fs_reload_example):
    """The test passes when the fixture reloads the module defining
    EXAMPLE_FILE, so the constant resolves inside the fake filesystem."""
    target_path = example.EXAMPLE_FILE
    fs_reload_example.create_file(target_path, contents='stuff here')
    check_that_example_file_is_in_fake_fs()
def autoload():
"""
Attempts to load (import) notification handlers from modules defined in ``PYNOTIFY_AUTOLOAD_MODULES``
"""
modules = settings.AUTOLOAD_MODULES
if modules:
for module in modules:
try:
import_module(module)
except ImportError:
... | 13,972 |
def get_outgroup(tree: CassiopeiaTree, triplet: Tuple[str, str, str]) -> str:
"""Infers the outgroup of a triplet from a CassioepiaTree.
Finds the outgroup based on the depth of the latest-common-ancestors
of each pair of items. The pair with the deepest LCA is the
ingroup and the remaining leaf is th... | 13,973 |
def delete_host(resource_root, host_id):
    """
    Delete a host by id
    @param resource_root: The root Resource object.
    @param host_id: Host id
    @return: The deleted ApiHost object
    """
    path = "%s/%s" % (HOSTS_PATH, host_id)
    return call(resource_root.delete, path, ApiHost)
def get_preds(model: nn.Module, image: Union[np.array, str], **kwargs) -> Tuple[List]:
"""
Generated predictions for the given `image` using `model`.
"""
logger = _get_logger(name=__name__)
# load in the image if string is give
if isinstance(image, str):
image = Image.open(image).conver... | 13,975 |
def get_csv_data(filepath, source='DoQ'):
"""
Yield a large csv row by row to avoid memory overload
"""
if source =='DoQ':
with open(filepath, "rt") as csvfile:
datareader = csv.reader(csvfile, delimiter='\t')
for row in datareader:
if row[2] == 'LENGTH' o... | 13,976 |
async def unblock_func(func_name:object,
func_args,
logger=None,
default_res=None,
is_new_loop=False,):
"""
异步函数非阻塞
:param func_name: def 函数对象名
:param func_args: 请求参数可迭代对象(必须遵循元素入参顺序!)
:param logger:
:par... | 13,977 |
def replace_if_has_wiki_link(line: str, folder_dict: Dict) -> Tuple[str, int]:
""" ^title
:return: (string with all wikis replaced, replacement count)
"""
embed_rule = re.compile(re_md_reference)
wiki_partial_rule = re.compile(re_md_wiki_partial)
wiki_rule = re.compile(re_md_wiki)
ne... | 13,978 |
def init_w(w, n):
"""
:purpose:
Initialize a weight array consistent of 1s if none is given
This is called at the start of each function containing a w param
:params:
w : a weight vector, if one was given to the initial function, else None
NOTE: w MUST be an array of np.float6... | 13,979 |
def parallelise_edges(xy, edges, targets, i_nbrs, ij_e, fixed=None, kmax=100, lmin=None, lmax=None, callback=None):
"""Parallelise the edges of a mesh to given target vectors.
Parameters
----------
xy : list
The XY coordinates of the vertices of the edges.
edges : list
The edges as ... | 13,980 |
def _ensure_min_resources(progs, cores, memory, min_memory):
"""Ensure setting match minimum resources required for used programs.
"""
for p in progs:
if p in min_memory:
if not memory or cores * memory < min_memory[p]:
memory = float(min_memory[p]) / cores
return cor... | 13,981 |
def ctypes_pointer(name):
    """Create a ctypes type representing a C pointer to a custom data type ``name``."""
    pointer_type_name = "c_%s_p" % name
    # Subclassing c_void_p gives an opaque pointer with a descriptive name.
    return type(pointer_type_name, (ctypes.c_void_p,), {})
def generate_linear_data(n, betas, sigma):
"""Generate pandas df with x and y variables related by a linear equation.
Export data as csv.
:param n: Number of observations.
:param betas: beta parameters.
:param sigma: standard deviation
:return: None
"""
x = np.linspace(start=0.0, stop=1.... | 13,983 |
def entry(
text,
*,
foreground: str = "",
background: str = "",
sgr: str = "",
jump_line: str = "\n> ",
) -> str:
"""
This function is derived from the input, but with the option of
coloring it and some different formatting.
Note: If you use Windows, the coloring opti... | 13,984 |
def vrotate_3D(vec: np.ndarray,
ref: np.ndarray) -> np.ndarray:
"""Rotates a vector in a 3D space.
Returns the rotation matrix for `vec` to match the orientation of a
reference vector `ref`.
https://math.stackexchange.com/questions/180418/calculate-rotation-matrix-to-align-vector-a-to-... | 13,985 |
def inject_data(image,
key=None, net=None, metadata=None, admin_password=None,
files=None, partition=None, use_cow=False):
"""Injects a ssh key and optionally net data into a disk image.
it will mount the image as a fully partitioned disk and attempt to inject
into the speci... | 13,986 |
def asymptotic_decay(learning_rate, t, max_iter):
"""Decay function of the learning process.
Parameters
----------
learning_rate : float
current learning rate.
t : int
current iteration.
max_iter : int
maximum number of iterations for the training.
"""
return le... | 13,987 |
def get_cell_content(browser, author):
"""
get novel cells
return [cell, cell, cell]
"""
content = list()
cells = browser.find_all(class_='t t2')
for cell in cells:
if cell.find(class_='r_two').b.string != author:
continue
for cell_content in cell.find(class_=['tp... | 13,988 |
def add_supported_cxxflags(self, cxxflags):
"""
Check which cxxflags are supported by compiler and add them to env.CXXFLAGS variable
"""
if len(cxxflags) == 0:
return
self.start_msg('Checking supported CXXFLAGS')
supportedFlags = []
for flags in cxxflags:
flags = Utils.to_l... | 13,989 |
def iter_meta_refresh(file):
"""Iterate through meta refreshes from a file.
Args:
file: str, path-like, or file-like object
"""
try:
fh = open(file, 'rb')
except TypeError:
fh = file
except FileNotFoundError:
fh = None
if not fh:
return
try:
... | 13,990 |
def kmeans(data: cp.ndarray, K: int, centroids: cp.ndarray):
"""
Clusters points into k clusters using k_means clustering.
"""
print("Start K-means clustering.")
N, D = data.shape
new_centroids = cp.full((K, D), 0.0)
loop = 1
while loop < 200:
# assign each point to near... | 13,991 |
def _init_unique_pool(_ic_profile_dic, _cac_profile_dic, _ic_shape, _cac_shape):
"""initialize pool, function used to put data into shared memory"""
print(f"- Initialize core with illumination correction profiles for {list(_ic_profile_dic.keys())}")
init_dic['illumination'] = _ic_profile_dic
print(f"- I... | 13,992 |
def do_host_describe(cs, args):
    """Describe a specific host."""
    host = cs.hosts.get(args.host)
    headers = ["HOST", "PROJECT", "cpu", "memory_mb", "disk_gb"]
    utils.print_list(host, headers)
def triangle_area(a, h):
    """Return the area of a triangle given base length ``a`` and height ``h``.

    >>> triangle_area(5, 3)
    7.5
    """
    base_times_height = a * h
    return base_times_height / 2.0
def generate_exclude_file1_file2(HWEresults_file, batches_list, draw_script, all_output, perbatch_output, allbatches_output, FDR_index_remove_variants):
"""generate exclude file 1: From HWE calculations across the entire
collection, remove variants for which HWE fails even if the worst batch
removed (i.e. e... | 13,995 |
def create_mongo_handler(config):
"""
:param config: configuration dictionary
:return: [MongoLogHandler, ] if 'mongo_logger' is in options, else []
"""
from nicos.core import ConfigurationError
if hasattr(config, 'mongo_logger'):
url = urllib.parse.urlparse(config.mongo_logger)
... | 13,996 |
def split_and_load(data, ctx_list, batch_axis=0, even_split=True):
"""Splits an NDArray into `len(ctx_list)` slices along `batch_axis` and loads
each slice to one context in `ctx_list`.
Parameters
----------
data : NDArray
A batch of data.
ctx_list : list of Context
A list of Co... | 13,997 |
def _get_scoped_outputs(comp, g, explicit_outs):
"""Return a list of output varnames scoped to the given name."""
cnamedot = comp.name + '.'
outputs = set()
if explicit_outs is None:
explicit_outs = ()
for u,v in g.list_connections():
if u.startswith(cnamedot):
outputs.a... | 13,998 |
def init_ha_active(datanode, cluster):
"""
Do initial HA setup on the leader.
"""
local_hostname = hookenv.local_unit().replace('/', '-')
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_namenode()
remove_state('namenode.started')
# initial cluster is us (active) plus a stand... | 13,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.