| content (string, lengths 22–815k) | id (int64, 0–4.91M) |
|---|---|
def get_input(label, default=None):
"""Prompt the user for input.
:param label: The label of the prompt.
:param label: str
:param default: The default value.
:rtype: str | None
"""
if default:
_label = "%s [%s]: " % (label, default)
else:
_label = "%s: " % label
... | 16,000 |
def get_pretrain_data_text(data, batch_size, num_ctxes, shuffle,
num_buckets, vocab, tokenizer, max_seq_length, short_seq_prob,
masked_lm_prob, max_predictions_per_seq, whole_word_mask,
num_parts=1, part_idx=0, num_workers=1):
"""Get a... | 16,001 |
def define_app_flags(scenario_num):
""" Define the TensorFlow application-wide flags
Returns:
FLAGS: TensorFlow flags
"""
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_boolean('save_model', False, 'save model to disk')
tf.app.flags.DEFINE_string('summaries_dir', './logs', 'tens... | 16,002 |
def longitude_validator(value):
    """Validate that a longitude lies strictly within (-180, 180) degrees.

    :param value: Longitude in decimal degrees.
    :return: The validated value, unchanged.
    :raises ValidationError: If the value falls outside the open interval.
    """
    if not -180 < value < 180:
        # Fixed: the original message cited the latitude range (-90, 90)
        # even though the check above uses the longitude range (-180, 180).
        raise ValidationError(_('longitude not in range of -180 < value < 180'))
    return value
def getVariables():
"""
Retrieves the variables.json file.
"""
if os.path.exists('variables.json'):
with open('variables.json') as jsonFile:
variables = json.loads(jsonFile.read())
return variables
else:
variables = {}
variables['path'] = ''
r... | 16,004 |
def stop_random_tasks(
cluster: str,
task_count: int = None,
task_percent: int = None,
service: str = None,
reason: str = "Chaos Testing",
configuration: Configuration = None,
secrets: Secrets = None,
) -> List[AWSResponse]:
"""
Stop a random number of tasks based on given task_count... | 16,005 |
def test_create_batch_multi_record_update_fails(shared_zone_test_context):
"""
Test recordsets with multiple records cannot be edited in batch (relies on config, skip-prod)
"""
client = shared_zone_test_context.ok_vinyldns_client
ok_zone = shared_zone_test_context.ok_zone
# record sets to setup... | 16,006 |
def usage():
"""
Print short help meesage.
"""
print('Usage:')
print(' ' + sys.argv[0] + ' --help')
print(' ' + sys.argv[0] + ' [options] /src/dir/path /dst/dir/path')
print('Options:')
print(' --move move files (will remove source files);')
print(' --quiet be quiet;')
... | 16,007 |
def plot_confusion_matrix(conf_matrix: np.ndarray, cmap: str = 'bwr',
ax=None, show: bool = False,
title: str = 'Confusion matrix') -> None:
"""Plots a confusion matrix.
Args:
conf_matrix (np.ndarray): confusion matrix.
cmap (str, optional): c... | 16,008 |
def _build_colormap(data, hue, palette, order):
"""Builds a colormap."""
if hue is None:
color_map = {}
else:
if palette is None:
palette = sns.color_palette()
if order is None:
order = data[hue].unique()
color_map = OrderedDict(zip(order, palette))... | 16,009 |
def f_is_oword(*args):
    """
    f_is_oword(F, arg2) -> bool

    Thin pass-through to the SWIG-generated ``_ida_bytes.f_is_oword``;
    see 'is_oword()' for the underlying semantics.

    @param F (C++: flags_t)
    """
    return _ida_bytes.f_is_oword(*args)
def aggregate_CSV_files(data_path):
""" Aggregate the data in CSV files, specified in the config file, into a
single pandas DataFrame object. """
merge_queue = []
for path in data_path:
data_df = pd.read_csv(path, na_values = ['.']);
data_df.index = pd.to_datetime(data_df['DATE'], forma... | 16,011 |
def test_replace_in_list_single():
    """Check in-place replacement over a list of strings."""
    items = ['aa', 'bb', 'cc']
    replace_in_list(items, ['aa', 'bb'], ['dd', 'ee'])
    # Matched entries are swapped in place; unmatched ones are untouched.
    assert items == ['dd', 'ee', 'cc']
def dR2(angle: np_float) -> np.ndarray:
"""Derivative of a rotation matrix around the second axis with respect to the rotation angle
Args:
angle: Scalar, list or numpy array of angles in radians.
Returns:
Numpy array: Rotation matrix or array of rotation matrices.
"""
zero = _ze... | 16,013 |
def build_symm_filter_commands(chainfiles, chromref, outpath, cmd, jobcall):
"""
:return:
"""
chromfiles = collect_full_paths(chromref, '*.tsv')
assert chromfiles, 'No chromosome files found at location: {}'.format(chromref)
assm_chrom = dict()
for chrf in chromfiles:
assm = os.path.... | 16,014 |
def clear_predecessor(n):
"""
Sets n's predecessor to None
:param n: node on which to call clear_predecessor
:return: string of response
"""
def clear(node):
node.predecessor = None
n.event_queue.put(clear)
resp_header = {"status": STATUS_OK}
return utils.create_request(resp... | 16,015 |
def resource_file():
    """Return a factory that creates an empty file inside a directory.

    :return: Callable taking ``(dirname, filename)`` and returning the
        path of the newly created (empty) file.
    """
    def _resource_file(dirname, filename):
        target = os.path.join(dirname, filename)
        # Append mode creates the file if absent without truncating it.
        with open(target, 'a'):
            pass
        return target
    return _resource_file
def batch_generator(X, y, batch_size, samples_per_epoch):
"""Generate mini-batches."""
number_of_batches = int(samples_per_epoch / batch_size)
shuffle_index = np.arange(np.shape(y)[0])
np.random.shuffle(shuffle_index)
X = X[shuffle_index, :]
y = y[shuffle_index]
for i in range(number_of_bat... | 16,017 |
def get_local_variable_influence(model, form_data):
"""
"""
row = format_data_to_row(form_data)
model_obj = read_model(model.path, model.file_type)
df = load_dataset_sample(model.dataset, nrows=50)
df = df[model.dataset.model_columns]
explainer = load_model_explainer_from_obj(model_obj, ... | 16,018 |
def Print(text='', newline=True, colour=None):
"""Handle a line of output to the terminal.
In test mode this is recorded in a list. Otherwise it is output to the
terminal.
Args:
text: Text to print
newline: True to add a new line at the end of the text
colour: Colour to use for... | 16,019 |
def submit_experiment(body, **kwargs):
"""Submit an experiment
:param body: experiment payload
:type body: dict | bytes
:rtype: StatusSerializer
"""
serializer = ExperimentSerializer.from_dict(body)
check_experiment_permission(serializer, kwargs["token_info"])
stub = get_experiments_s... | 16,020 |
def obj_assert_check(cls):
"""
The body of the assert check for an accessor
We allow all versions of add/delete/modify to use the same accessors
"""
if cls in ["of_flow_modify", "of_flow_modify_strict",
"of_flow_delete", "of_flow_delete_strict",
"of_flow_add"]:
... | 16,021 |
def ripemd160(data: List[int]) -> List[int]:
"""
:param data:
:return:
"""
try:
bytes_data = bytes(data)
except TypeError:
raise NativeContractException
digest = hashlib.new("ripemd160", bytes_data).digest()
padded = 12 * [0] + list(digest)
return list(bytearray(byte... | 16,022 |
def has_duplicates(s: list) -> bool:
    """Return True if any element appears more than once in *s*.

    Fixed: the original return annotation said ``dict`` although the
    function returns a ``bool``. A ``set`` replaces the dict that was
    only ever used for membership testing.

    :param s: Iterable of hashable elements.
    :return: True on the first repeated element, otherwise False.
    """
    seen = set()
    for item in s:
        if item in seen:
            return True
        seen.add(item)
    return False
def test_slicing_on_instances_3():
"""
Like `test_slicing_on_instances_2` but uses a compound model that does not
have any invalid slices due to the resulting model being invalid
(originally test_slicing_on_instances_2 passed without any
ModelDefinitionErrors being raised, but that was before we pre... | 16,024 |
def get_from_chain(J, domain, nof_coefficients, ncap=10000, disc_type='sp_quad', interval_type='lin',
mapping_type='lan_bath', permute=None, residual=True, low_memory=True, stable=False,
get_trafo=False, force_sp=False, mp_dps=30, sort_by=None, **kwargs):
"""
Returns st... | 16,025 |
def factory(kernel_type, cuda_type=None, gpu_mode=None, *args, **kwargs):
"""Return an instance of a kernel corresponding to the requested kernel_type"""
if cuda_type is None:
cuda_type = default.dtype
if gpu_mode is None:
gpu_mode = default.gpu_mode
# turn enum string to enum object
... | 16,026 |
def init_scaler(
scaler_parameters: Dict, fit_data: np.ndarray,
) -> Union[MinMaxScaler, StandardScaler, RobustScaler]:
"""Initialize and return scaler.
Args:
scaler_parameters: Parameters of scaler.
fit_data: Data to be fit.
Returns:
Selected scaler.
""... | 16,027 |
def get_dynamic_resource(previous_length: str):
    """Fetch per-node resource usage over the given history window.

    :param previous_length: Window length as a decimal string.
    :return: Mapping of node name to resource usage.
    """
    window = int(previous_length)
    return redis_controller.get_resource_usage(previous_length=window)
def MoveAndMerge(src_dir, dst_dir):
"""Moves data files from src_dir and merges with data files on dst_dir"""
file_utils.TryMakeDirs(os.path.join(dst_dir, ATT_DIR_NAME))
for att_name in os.listdir(os.path.join(src_dir, ATT_DIR_NAME)):
att_src_path = os.path.join(src_dir, ATT_DIR_NAME, att_name)
att_dst_p... | 16,029 |
def _margo_bin(exe=""):
    """Return the path of the margo executable.

    :param exe: Optional executable name; falls back to ``INSTALL_EXE``
        when empty.
    """
    name = exe if exe else INSTALL_EXE
    return gs.home_path("bin", name)
def csl_density(basis, mini_cell, plane):
"""
returns the CSL density of a given plane and its d_spacing.
"""
plane = np.array(plane)
c = csl_vec(basis, mini_cell)
h = np.dot(c.T, plane)
h = smallest_integer(h)[0]
h = common_divisor(h)[0]
g = np.linalg.inv(np.dot(c.T, c))
h_norm ... | 16,031 |
def _concatenate_multiple_freq(at_cache, multi_rt_array, multi_time_array,
inplace=False, tick_time_field='tradeTime'):
"""
Concatenate multiple frequency data.
Args:
at_cache(dict): current at multiple cache data
multi_rt_array(matrix): multiple real-time dat... | 16,032 |
def mw_Av():
    """Build the A_V attenuation by the MW towards M31."""
    # A_V / E(B-V) ratio for the Landolt V band, from Table 6 of
    # Schlafly & Finkbeiner (2011); 0.07 is the adopted E(B-V) toward M31.
    ratio = SF11ExtinctionCurve()['Landolt V']
    return 0.07 * ratio
def load_model(model_uri):
"""
Load an H2O model from a local file (if ``run_id`` is ``None``) or a run.
This function expects there is an H2O instance initialised with ``h2o.init``.
:param model_uri: The location, in URI format, of the MLflow model. For example:
- ``/Users/me/pa... | 16,034 |
def load_data(filenames):
"""Load a single file or sequence of files using skimage.io"""
filenames = [filenames, ] if isinstance(filenames, str) else filenames
loadfunc = tifffile.imread if all(f.lower().endswith("tif")
for f in filenames) else skio.imread
if len(fi... | 16,035 |
def GetInverseMatrix(matrix):
"""
:param matrix: the matrix which will get its inverse matrix
:return: the inverse matrix(two dimensions only)
"""
matrix[0, 0], matrix[1, 1] = -matrix[1, 1], -matrix[0, 0]
matrix = matrix / -(matrix[0, 0] * matrix[1, 1] - matrix[0, 1] * matrix[1, 0])
return m... | 16,036 |
def petsc_memory_stats(log):
"""Return the memory stats section of PETSc's -log_view output as a dictionary."""
# first search for the 'Memory usage' header, then match anything that follows
# after the first line starting with --- up until the first line starting with =====
# re.DOTALL makes . match ne... | 16,037 |
def test_tile_valid_default():
"""Should return a 3 bands array and a full valid mask."""
with COGReader(COG_NODATA) as cog:
# Full tile
data, mask = cog.tile(43, 24, 7)
assert data.shape == (1, 256, 256)
assert mask.all()
tile_bounds = WEB_MERCATOR_TMS.xy_bounds(43, 24,... | 16,038 |
def track2result(bboxes, labels, ids, num_classes):
"""Convert tracking results to a list of numpy arrays.
Args:
bboxes (torch.Tensor | np.ndarray): shape (n, 5)
labels (torch.Tensor | np.ndarray): shape (n, )
ids (torch.Tensor | np.ndarray): shape (n, )
num_classes (int): class... | 16,039 |
def build_result_dataframe(gh, pred, df):
""" Construct a datarame that contain the prediction.
:param gh: the geohas6 code of the prediction
:param pred: numpy array of prediction
:param df: the dataframe used for prediction
:returns: prediction dataframe
:rtype: pandas.core.frame.DataFrame
... | 16,040 |
def find_module(module_name: str, search_paths: Sequence[str | Path] | None = None) -> Path: # noqa: WPS231
"""Find a module in a given list of paths or in `sys.path`.
Parameters:
module_name: The module name.
search_paths: The paths to search into.
Raises:
ModuleNotFoundError: Wh... | 16,041 |
def bandwidth_limited_write(in_file, out_file, kbits_per_sec_str,
post_delay_compression):
"""Bandwidth limited writing.
Args:
in_file: file, the file to read the data from.
out_file: file, the file to write the data to.
kbits_per_sec_str: string, the bandwidth speed.
po... | 16,042 |
def test_logsources_type(self):
    """Check that a log source's reported type matches its stored Type field."""
    source = LogSources.objects.get(Type="Iptables")
    self.assertEqual("Iptables", source.get_type())
def day_1_puzzle_1_solution() -> int:
"""Use this function to return the total fuel requirements for all of the modules.
This function is used for reading the text file of puzzle data and returning the
total amount of fuel that is required for the modules.
:return: the total fuel requirement.
"""
... | 16,044 |
def printc(*args, **kwargs):
""" Analog to the print() function, but accepts Color objects to change colors
Any Color objects will cause the output color to change for subsequent text.
Other objects will be printed as usual.
end is always printed without color, this avoids common problems if t... | 16,045 |
def is_description_style(style):
    """True if this is a style used for Relationships paragraph text."""
    return any(is_style(style, name) for name in ('Normal', 'Note'))
def inject_content_head_last(html, content):
"""
将文本内容插入到head的尾部
:type html: str
:type content: str
:rtype: str
"""
head_end_pos = html.find("</head") # 找到 </head> 标签结束的位置
if head_end_pos == -1:
# 如果没有 </head> 就不进行插入
return html
return html[:head_end... | 16,047 |
def download(ticker: str,
start: Union[pd.Timestamp, str] = None,
end: Union[pd.Timestamp, str] = None,
frequency: str = "day") -> pd.DataFrame:
"""
Download market data from yahoo finance using the yfinance library from ticker `ticker` from `start` to `end`
at a speci... | 16,048 |
def sortUrlList(urlList):
"""Return ordered url list (localFile, DAP, HTTP, FTP)."""
#localList = [url for url in urlList if os.path.exists(url)]
#dodsList = [url for url in urlList if sciflo.utils.isDODS(url)]
#httpList = [url for url in urlList if not sciflo.utils.isDODS(url) and url.startswith('http'... | 16,049 |
def main(args):
""" Main method
"""
# await/async requires python >= 3.5
if sys.version_info.major < 3 and sys.version_info.minor < 5:
print("Error, language features require the latest python version.")
print("Please install python 3.8 or greater")
return 1
# Force tierie... | 16,050 |
def _ps_run_one_reset_kwargs(G, reset_kwargs: tuple, eval: bool):
"""
Sample one rollout with given init state and domain parameters, passed as a tuple for simplicity at the other end.
This function is used when a minimum number of rollouts was given.
"""
if len(reset_kwargs) != 2:
raise pyr... | 16,051 |
def k8s_config(monkeypatch):
    """Point the k8s client config at a local test API server.

    Targets localhost:8080 and disables SSL verification for test runs.
    """
    overrides = (("api_server", "http://localhost:8080"),
                 ("verify_ssl", False))
    for attr, value in overrides:
        monkeypatch.setattr(config, attr, value)
def test_compiling_a_sequence_not_compiling2(workspace, root, monkeypatch,
exopy_qtbot, dialog_sleep):
"""Test compiling a sequence that can be evaluated but not compiled.
"""
def __raise(*args, **kwargs):
return False, {}, {'test': False}
from exopy... | 16,053 |
def deprecated(message, exception=PendingDeprecationWarning):
"""Throw a warning when a function/method will be soon deprecated
Supports passing a ``message`` and an ``exception`` class
(uses ``PendingDeprecationWarning`` by default). This is useful if you
want to alternatively pass a ``DeprecationWarn... | 16,054 |
def check(config, content, filename):
"""
Run flake8 with the given ``config`` against the passed file.
Returns a ``list`` of :py:class:`flake.Violation`.
"""
with environment(config, content, filename) as env:
out = subprocess.check_output(['flake8',
... | 16,055 |
def x_dot(y):
    """x_dot(y)

    Differential equation for position as given in CW 12: dx/dt = y.
    """
    return y
def get_comp_depends(comp_info, comps):
    """Resolve the transitive dependency set for the given components.

    :param comp_info: Mapping of component name to its index entry; each
        entry holds a "dependencies" list.
    :param comps: Iterable of component names to resolve.
    :return: De-duplicated list of all direct and transitive dependencies.
    """
    found = []
    for name in comps:
        entry = comp_info.get(name)
        if entry is not None:
            found.extend(entry["dependencies"])
    if found:
        # Recurse on the direct dependencies to pick up indirect ones.
        found.extend(get_comp_depends(comp_info, found))
    return list(set(found))
def get_naiveb_model(x_train: pd.DataFrame, y_train: pd.Series) -> GaussianNB:
    """Train and return a Gaussian naive Bayes classifier.

    Features must share a common scale before fitting naive Bayes.
    """
    model = GaussianNB(priors=None)
    model.fit(x_train, y_train)
    return model
def bakeClip(blend: Tuple[int, int] = tuple(1, 1),clipIndex: int = 1,keepOriginals: bool = False,name: str = "") -> None:
"""
クリップをベイク処理し、単一のクリップにブレンドするのに使用します。
-----------------------------------------
Flags:
-----------------------------------------
blend ([uint, uint]): ブレンドされるクリップの... | 16,059 |
def updateDF(df, fields, id_patient):
    """Update one patient's row in place and return the DataFrame.

    :param df: DataFrame containing an "id_patient" column.
    :param fields: Mapping of column name to a sequence whose first item
        is the new value for that column.
    :param id_patient: Identifier selecting the row(s) to update.
    :return: The same (mutated) DataFrame.
    """
    mask = df["id_patient"] == id_patient
    for column, values in fields.items():
        df.loc[mask, column] = values[0]
    return df
def gcd(num1: int, num2: int) -> int:
    """Compute the greatest common divisor of two integers.

    Uses Euclid's algorithm: repeatedly replace the pair with
    (divisor, remainder) until the remainder is zero.
    """
    a, b = num1, num2
    while b:
        a, b = b, a % b
    return a
def application(request):
"""
To use this application, the user must send a POST request with
base64 or form encoded encoded HTML content and the wkhtmltopdf Options in
request data, with keys 'base64_html' and 'options'.
The application will return a response with the PDF file.
"""
if reque... | 16,062 |
async def check_user_name(request):
"""Check if a user exists with provided username."""
log_request(request)
conn = await create_connection()
response = await users_query.users_search_duplicate(
conn, request.args.get("username")
)
conn.close()
return json({"exists": bool(response)}... | 16,063 |
def azip_longest(*aiterables, fillvalue=None):
    """Async counterpart of ``itertools.zip_longest`` with parallel iteration."""
    # stop_any=False -> keep yielding until the longest iterable is exhausted.
    return _azip(*aiterables, stop_any=False, fillvalue=fillvalue)
def stop_instance(args):
"""
Stops a running instance, shutting it down cleanly, and allows you to restart the instance at a later time
parameter: (string) zone
Name of the zone for request.
parameter: (string) instance
Name of the instance scoping this request.
"""
project = SE... | 16,065 |
def drop_table(name, con):
"""
drop table from database
Parameters
----------
name : string, name of SQL table
con : sqlalchemy.engine.Engine or sqlite3.Connection
Returns
-------
True
Examples
--------
>>> import pandas as pd
>>> from sqlalchemy import create_engi... | 16,066 |
def class_info_interface(**class_name):
"""
Set Class_Name, Class_Index, and DNN Model
\nclass_name (kwargs) : Input Class Name with list type,
if want to set class number, add tuple parameters
like 'class_info_interface(class_name = [list], class_number = [list])'
\nclass_number : Default the n... | 16,067 |
def uint8(value):
    """
    Create an SPL ``uint8`` value.

    Returns:
        Expression: Expression representing the value.
    """
    as_int = int(value)
    return streamsx.spl.op.Expression('UINT8', as_int)
def normalize_data(x_train, x_test, x_val=None):
"""normalize input to zero mean one std assuming x_train, x_test are torch Tensors"""
m = x_train.mean(0)
s = x_train.std(0)
x_train -= m
x_test -= m
if x_val: x_val -= m
cols = []
cols0 = []
for i in range(s.size(0)):
if s[i] ... | 16,069 |
def print_table(log_results,
platform_width = 0,
build_failures_width = 0,
test_failures_width = 0,
successful_width = 0,
space_char = " ",
list_separator = DEFAULT_LIST_SEPARATOR):
"""Print out a table in the requested format (text or markdown).... | 16,070 |
def test_two_tags_unshared_images(default_tag_policy, initialized_db):
"""
Repository has two tags with no shared images between them.
"""
with assert_gc_integrity():
repository = create_repository(latest=["i1", "i2", "i3"], other=["f1", "f2"])
delete_tag(repository, "latest")
as... | 16,071 |
def is_valid(filepath, digest, hashAlgo='md5'):
    """Verify the integrity of a file against a hash value.

    :param filepath: Path of the file to check.
    :param digest: Expected hex digest (must be a str).
    :param hashAlgo: Hash algorithm name passed to ``calculate``.
    :return: True when a digest was computed and matches *digest*.
    """
    assert(isinstance(digest, str))
    computed = calculate(filepath, hashAlgo)
    LOG.debug('Calculated digest: ' + computed)
    LOG.debug(' Original digest: ' + digest)
    return computed is not None and computed == digest
def align_reconstruction_to_pdr(reconstruction, data):
"""
leveling and scaling the reconstructions to pdr
"""
if reconstruction.alignment.aligned:
return reconstruction
if not data.pdr_shots_exist():
return reconstruction
pdr_shots_dict = data.load_pdr_shots()
X, Xp = [],... | 16,073 |
def test_validation_check_service_account_removed(
invalid_service_account_not_exist,
valid_google_project_patcher,
db_session,
cloud_manager,
):
"""
Test that an invalid service account whose policy does not exist is
removed from the database
"""
(
fence.scripting.google_mo... | 16,074 |
def checkLengthSmaller(op, graph, frm, to):
"""
Confirm resulting video has less frames that source.
:param op:
:param graph:
:param frm:
:param to:
:return:
@type op: Operation
@type graph: ImageGraph
@type frm: str
@type to: str
... | 16,075 |
def enable_log(fmt='[%(asctime)s] [%(process)5s] %(levelname)s %(module)s %(name)s %(message)s',
enable_color=True, filename=None):
"""
Clears all log handlers, and adds color handler and/or file handlers
:param fmt: logging format string
:param enable_color: True to enable
:param fi... | 16,076 |
def build_con_and_ds(dataset: str):
"""
Builds test connector and test datasource for testing with API key
Leave this function in if ever want to run tests without skipping
due to there being no Bearer tokens
How to use:
Replace build_ds function with this one in test_aircall file
Be sure t... | 16,077 |
def _get_search_str_regex_main_body(join_with, last_date):
"""Returns something like:
(t1[0-5]\d\d\d\d|t160[0-2]\d\d|t16030\d|t16031[0-3])"""
todo_date = _get_todo_date(last_date + timedelta(1))
# yrs = _make_last_digit_all_values_less_last_digit(todo_date[:3])
# search_substrs = [yrs[-1]] #Only go ... | 16,078 |
def _remove_existing_tmp_file(tmp_file):
"""Make sure the temporary file is removed."""
if os.path.isfile(tmp_file):
os.remove(tmp_file) | 16,079 |
def test_misc():
"""Generic tests for exceptional cases that the parser needs to take into
consideration.
"""
# The atomic number is given in the NAT convention
filepath = "tests/data/misc/nat/HfS2_PBE0D3_ZD_fc3_supercell-00497.o"
archive = parse(filepath)
asserts_basic(archive)
asserts_... | 16,080 |
def is_palindrome_recursive(text, left=None, right=None):
"""time complexity: O(1) because you are checking which conditional will run, which does not involve any loops
text = str
left = int
right = int"""
if len(text) == 0:
return True
given = get_letters(text)
if left is None and r... | 16,081 |
def bcSet1():
    """
    Set boundary condition.

    NOTE(review): the body is empty — this looks like an unimplemented
    placeholder; calling it is a no-op that returns None.
    """
def folder_command(args):
"""
Edit the folder structure of the todo list.
usage: todone folder <command> <folder(s)>
Valid commands are:
new create a new folder with the given name
rename rename an existing folder
delete remove a folder
list list all folders
"... | 16,083 |
def flip(m, axis=None):
"""Reverses the order of elements in an array along the given axis.
The shape of the array is preserved, but the elements are reordered.
Parameters
----------
m : array_like
Input array.
axis : None or int or tuple of ints, optional
Axis or axes along wh... | 16,084 |
def radius_of_gyration(pos):
    """
    Radius of gyration of a group of positions.

    Does not account for periodic boundaries.
    """
    centered = pos - np.mean(pos, axis=0)
    # Per-dimension RMS displacement from the centre of mass...
    per_dim = np.sqrt((centered ** 2).sum(axis=0) / len(pos))
    # ...combined into a single scalar radius.
    return np.linalg.norm(per_dim)
def get_metadata_for_list(commit_range, git_dir=None, count=None,
series=None, allow_overwrite=False):
"""Reads out patch series metadata from the commits
This does a 'git log' on the relevant commits and pulls out the tags we
are interested in.
Args:
commit_range (st... | 16,086 |
def side_seperator(lsep, rsep):
    """
    Configure a custom side-lined formatter and return the format mapping.

    A side-lined formatter is:
    `[DATE] SEP "L_SEP" EVENT "R_SEP" LOG`
    `loggy.side_seperator(lsep="||",rsep="||") # Default vals`
    """
    fmt['ls'], fmt['rs'] = lsep, rsep
    return fmt
def clear():
    """Clear the terminal screen (Windows ``cls``, POSIX ``clear``)."""
    command = "cls" if os.name == "nt" else "clear"
    os.system(command)
def _semi_implicit_euler(ode_fun, jac_fun, y_olds, t_old,
f_old,dt, args, solver_parameters, J00, I):
"""
Calculate solution at t_old+dt using the semi-implicit Euler method.
Based on Section IV.9.25 of Ref II.
"""
y_older, y_old = y_olds
je_tot = 0
if(f_old is None)... | 16,089 |
def make_sequential(layer_configs, input):
"""Makes sequential layers automatically.
Arguments:
layer_configs: An OrderedDict that contains the configurations of a
sequence of layers. The key is the layer_name while the value is a dict
contains hyper-parameters needed to instantiate the c... | 16,090 |
def Pol_Dyn_ExploreWithNUTS(resultsList,totalSimDays=1000,numDaysRemain=1000,\
totalBudget=1000,numBudgetRemain=1000,policyParamList=[0],startDay=0):
"""
Grab intermediate and end node distribtuions via NUTS. Identify intermediate node
sample variances. Pick an intermediate node,... | 16,091 |
def calc_cos_t(hb_ratio, d, theta_s_i, theta_v_i, relative_azimuth):
"""Calculate t cossine.
Args:
hb_ratio (int): h/b.
d (numpy array): d.
theta_s_i (numpy array): theta_s_i.
theta_v_i (numpy array): theta_v_i.
relative_azimuth (numpy array): relative_azimuth.
Retu... | 16,092 |
def step_impl(context):
"""Go through responses and store any with HTTP protocol errors
(as caught by Requests) into the database
"""
new_findings = 0
for response in context.responses:
if response.get('server_protocol_error') is not None:
if fuzzdb.known_false_positive(context,... | 16,093 |
def guild_only() -> Callable:
"""A decorator that limits the usage of a slash command to guild contexts.
The command won't be able to be used in private message channels.
Example
---------
.. code-block:: python3
from discord import guild_only
@bot.slash_command()
... | 16,094 |
def truncate(text, words=25):
    """Remove tags and truncate text to the specified number of words."""
    # Replace each tag (DOTALL so tags may span lines) with a space,
    # then keep only the first *words* whitespace-separated tokens.
    stripped = re.sub("(?s)<.*?>", " ", text)
    selected = stripped.split()[:words]
    return " ".join(selected)
def _run_query_create_log(query, client, destination_table=None):
"""
Runs BigQuery queryjob
:param query: Query to run as a string
:param client: BigQuery client object
:return: QueryJob object
"""
# Job config
job_config = bigquery.QueryJobConfig()
if destination_table is not Non... | 16,096 |
def sys_update_char(
asciiCode: int,
fontx: int,
fonty: int,
img: tcod.image.Image,
x: int,
y: int,
) -> None:
"""Dynamically update the current font with img.
All cells using this asciiCode will be updated
at the next call to :any:`tcod.console_flush`.
Args:
asciiCode ... | 16,097 |
def read_disparity_gt(filename: str) -> np.ndarray:
"""
reads the disparity files used for training/testing.
:param filename: name of the file.
:return: data points.
"""
points = []
with open(filename, 'r') as file:
for line in file:
line = line.split(' ')
fra... | 16,098 |
def test_q_as_field():
"""
Test that passing in water discharge as a grid field results in self.q
holding correct values
"""
#set up a 5x5 grid with one open outlet node and low initial elevations.
nr = 5
nc = 5
mg = RasterModelGrid((nr, nc), 10.0)
mg.add_zeros('node', 'topographic... | 16,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.