content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def Str(*args):
    """(s1, s2, ...) -> match s1 or s2 or ..."""
    if len(args) == 1:
        # Single pattern: delegate directly to the single-string matcher.
        return Str1(args[0])
    # Multiple patterns: build an alternation of single-string matchers.
    alternatives = tuple(Str(arg) for arg in args)
    return Expression.Alt(alternatives)
def image_noise_gaussian(image):
"""
Adds Gaussian noise to the provided image
"""
float_img = image.astype(np.float)
gauss = np.random.normal(0.0, 4.0, (IMG_SIZE, IMG_SIZE, IMG_CHANNELS))
gauss = gauss.reshape(IMG_SIZE, IMG_SIZE, IMG_CHANNELS).astype(np.float)
result = float_img + gauss
... | 10,401 |
def argsort(x: T.FloatTensor, axis: int = None) -> T.LongTensor:
"""
Get the indices of a sorted tensor.
If axis=None this flattens x.
Args:
x: A tensor:
axis: The axis of interest.
Returns:
tensor (of ints): indices of sorted tensor
"""
if axis is None:
re... | 10,402 |
def dirPickledSize(obj,exclude=[]):
"""For each attribute of obj (excluding those specified and those that start with '__'),
compute the size using getPickledSize(obj) and return as a pandas Series of KBs"""
return pd.Series({o:getPickledSize(getattr(obj, o))/1024. for o in dir(obj) if not np.any([o[:2]=='_... | 10,403 |
def audit_log() -> Any:
    """
    List all events related to the connected member.
    """
    # Only authenticated members (with a session) may view their log.
    if "member_id" not in session:
        abort(404)
    member_id = session["member_id"]
    events = fetch_audit_log(member_id)
    return render_template("audit_log.html", full_audit_log=events)
def load_comparisonXL(method, evaluate="train", dropna=True):
"""Load comparison table."""
if evaluate == "test":
e = "['Test']"
elif evaluate == "in bag":
e = "['In Bag']"
elif evaluate == "out of bag":
e = "['Out of Bag']"
else:
e = "['Train']"
# Import methods... | 10,405 |
def display_countries():
    """
    Display all countries in Yahoo Finance data. [Source: Finance Database]
    """
    all_countries = financedatabase_model.get_countries()
    for country_name in all_countries:
        print(country_name)
def verify_user_password(user: User, password: str) -> bool:
    """Verify User's password with the one that was given on login page."""
    # pwd_context compares the plaintext candidate against the stored hash.
    stored_hash = user.password
    return pwd_context.verify(password, stored_hash)
def __valid_ddb_response_q(response):
"""private function to validate a given DynamoDB query response."""
if 'ResponseMetadata' in response:
if 'HTTPStatusCode' in response['ResponseMetadata']:
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
return True
return F... | 10,408 |
def GetPipelineResultsPathInGCS(artifacts_path):
"""Gets a full Cloud Storage path to a pipeline results YAML file.
Args:
artifacts_path: string, the full Cloud Storage path to the folder containing
pipeline artifacts, e.g. 'gs://my-bucket/artifacts'.
Returns:
A string representing the full Cloud ... | 10,409 |
def Pnm_p(n, m, x):
    """Eq:II.77 — derivative of the associated Legendre function P_n^m at x.

    scipy.special.lpmn returns (values, derivatives) arrays indexed by
    [order, degree]; [-1, -1] selects order m, degree n.
    """
    _, derivatives = lpmn(m, n, x)
    return derivatives[-1, -1]
def _remove_empty_subspace(subspaces, n_clusters, m, P, centers, labels, scatter_matrices):
"""
Check if after rotation and rearranging the dimensionalities a empty subspaces occurs. Empty subspaces will be
removed for the next iteration. Therefore all necessary lists will be updated.
:param subspaces: ... | 10,411 |
def _VerifyExtensionHandle(message, extension_handle):
"""Verify that the given extension handle is valid."""
if not isinstance(extension_handle, FieldDescriptor):
raise KeyError('HasExtension() expects an extension handle, got: %s' %
extension_handle)
if not extension_handle.is_extension... | 10,412 |
def main():
"""Evaluate model performance
"""
# construct the argument parse and parse the arguments
args = argparse.ArgumentParser()
args.add_argument("-i", "--input", required=True, help="path to input directory of images")
args.add_argument("-m", "--model", required=True, help="path to input ... | 10,413 |
def main():
"""
Main function
"""
concrete_utim = None
cm1 = None
session_key = None
logging.info("INIT rxQ")
rx_queue = queue.Queue()
logging.info("INIT txQ")
tx_queue = queue.Queue()
try:
cm1 = ConnectivityManager()
cm1.connect(dl_type=DataLinkManager.TYP... | 10,414 |
def _get_citekeys_action(elem, doc):
    """
    Panflute action to extract citationId from all Citations in the AST.
    """
    # Only Citation nodes contribute a citekey; everything else is ignored.
    if isinstance(elem, pf.Citation):
        global_variables["manuscript_citekeys"].append(elem.id)
    # Returning None leaves the AST unmodified (panflute convention).
    return None
def average_gradients(tower_grads):
"""Calculate the average gradient for each shared variable across all towers.
Note that this function provides a synchronization point across all towers.
Args:
tower_grads: List of lists of (gradient, variable) tuples. The outer list
is over individual gra... | 10,416 |
def compute_neighbours_probability_matrix(n_matrix, src, d_matrix, sigma_neigh):
"""Compute neighbours' probability matrix.
Parameters
-----------
n_matrix : :py:class:`~numpy.ndarray` of :py:class:`~int`, shape (n_verts, n_neigh_max)
The sets of neighbours.
src : :py:class:`~numpy.ndarray... | 10,417 |
def ar_cosmap(inmap):
"""
Get the cosine map and off-limb pixel map using WCS.
Generate a map of the solar disk that is 1 at disk center and goes radially outward as the cos(angle to LOS), which
is = 2 at 60 degrees from LOS.
Other outputs:
- rrdeg: gives degrees from disk center
- offlimb: ... | 10,418 |
def as_array(request: SubRequest) -> bool:
    """
    Boolean fixture to support ExtensionDtype _from_sequence method testing.
    """
    value = request.param
    # Guard against a mis-parametrized fixture: only bools are valid.
    assert isinstance(value, bool)
    return value
def test_day_generation_empty():
"""Test that empty day generation is as expected"""
cal = models.Calendar()
cal.title = "Test"
cal.start_date = date(2021, 1, 4)
cal.end_date = date(2021, 1, 15)
cal.monday = True
cal.tuesday = True
cal.wednesday = True
cal.thursday = True
cal.fr... | 10,420 |
def gram_matrix(x):
    """Create the gram matrix of x.

    x is a 4-D tensor (batch, channels, height, width); the result is
    (batch, channels, channels), normalized by c*h*w.
    """
    batch, channels, height, width = x.shape
    # Flatten the spatial dimensions so each channel is one row vector.
    features = x.view(batch, channels, height * width)
    gram = features.bmm(features.transpose(1, 2))
    return gram / (channels * height * width)
def create_presentation(path):
"""Creates ppt report from files in the specified folder. """
import os
import pandas as pd
from datetime import date
from pptx import Presentation
from pptx.util import Inches, Pt
report = Presentation()
#report = Presentation('test_data//templates//ppt_t... | 10,422 |
def flake8_package():
"""Style only checks files that have been modified. This fixture makes a small
change to the ``flake8`` mock package, yields the filename, then undoes the
change on cleanup.
"""
repo = spack.repo.Repo(spack.paths.mock_packages_path)
filename = repo.filename_for_package_name... | 10,423 |
async def overview(ctx):
    """Describe the rules of the dungeon"""
    # The message text (including its indentation) is part of the bot's
    # visible output and is preserved exactly.
    dungeon_rules = """
    I am Alaveus. For the meager price of 10 chalupas,
    I can transport you deep into the heart of a mythic dungeon
    where treasure and glory await those who dare enter!
    """
    await ctx.send(dungeon_rules)
def targz_pack(tgz_name: Path, source_path: Path):
"""
Create a new .tar.gz from the specified folder
Examples:
history/current -> history/current.tar.gz
history/generated/current -> history/generated/current.tar.gz
"""
with tarfile.open(tgz_name, "w:gz") as tar:
tar.add(source_pa... | 10,425 |
def publish_engine_py(dirs):
""" Publish the Python RESTler engine as .py files.
Will also do a quick compilation of the files to verify that no exception occurs
"""
# Copy files to a build directory to test for basic compilation failure
print("Testing compilation of Python files...")
try:
... | 10,426 |
async def async_setup_sdm_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the client entities."""
device_manager: DeviceManager = hass.data[DOMAIN][DATA_DEVICE_MANAGER]
entities = []
for device in device_manager.devices.values():
... | 10,427 |
def wall_clock_time_fedavg_vs_fedfs() -> None:
"""Comparision of FedAvg vs FedFS."""
bar_chart(
y_values=[
np.array(
[
RESULTS_WALL_CLOCK_TIME["fedavg-14"],
RESULTS_WALL_CLOCK_TIME["fedavg-16"],
]
),
... | 10,428 |
def assign(ctx):
    """Assign spatial objects to brain regions"""
    # Command-group entry point: no body of its own; subcommands do the work.
def connect_db(app):
    """Connect to our database.

    Binds the given Flask ``app`` to the module-level SQLAlchemy
    instance ``db`` and initializes the extension against it.
    """
    db.app = app
    db.init_app(app)
def open_file(path, mode):
"""
Attempts to open file at path.
Tried up to max_attempts times because of intermittent permission errors on Windows
"""
max_attempts = 100
f = None
for _ in range(max_attempts):
try:
f = open(path, mode)
except PermissionError:
... | 10,431 |
def get_sts_token(current_refresh_token):
"""
Retrieves an authentication token.
:param current_refresh_token: Refresh token retrieved from a previous authentication, used to retrieve a
subsequent access token. If not provided (i.e. on the initial authentication), the password is used.... | 10,432 |
async def device_climate_fan(device_climate_mock):
    """Test thermostat with fan device."""
    # Build the mocked climate device from the fan cluster description.
    climate_device = await device_climate_mock(CLIMATE_FAN)
    return climate_device
def make_training_config(args):
""" Create training config by parsing args from command line and YAML config file, filling the rest with default values.
Args
args : Arguments parsed from command line.
Returns
config : Dictionary containing training configuration.
"""
# Parse the configuration file.
config = {... | 10,434 |
def main():
"""Main"""
# Create cache dir if it does not exist
cachedir = '%s/cache' % os.path.dirname(os.path.realpath(__file__))
if not os.path.exists(cachedir):
os.makedirs(cachedir)
# Check if MANAGED_INSTALL_REPORT exists
if not os.path.exists(MANAGED_INSTALL_REPORT):
print... | 10,435 |
def main():
"""Generates a results file with the entities names
"""
#process_results_individual('corpora/go_phenotype_xml_100/', 'results/model_ontologies_go_phenotype_results_100.txt',
# 'results/go_phenotype_100_relations_names.tsv', 'results/go_phenotype_100_relations_ident... | 10,436 |
def _crown_relu_relaxer(inp: Bound) -> Tuple[LinFun, LinFun]:
"""Obtain the parameters of a linear ReLU relaxation as in CROWN.
This relaxes the ReLU with the adaptive choice of lower bounds as described
for CROWN-ada in https://arxiv.org/abs/1811.00866.
Args:
inp: Input to the ReLU.
Returns:
lb_lin... | 10,437 |
def run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params, use_dtr, use_profiling):
"""
This function initializes a model and performs
a single measurement of the model on the given input.
While it might seem most reasonable to initialize
the model outs... | 10,438 |
def make_noise(fid, snr, decibels=True):
"""Given a synthetic FID, generate an array of normally distributed
complex noise with zero mean and a variance that abides by the desired
SNR.
Parameters
----------
fid : numpy.ndarray
Noiseless FID.
snr : float
The signal-to-noise ... | 10,439 |
def viterbi(prob_matrix):
""" find the most likely sequence of labels using the viterbi algorithm on prob_matrix """
TINY = 1e-6 # to avoid NaNs in logs
# if prob_matrix is 1D, make it 2D
if len(np.shape(prob_matrix)) == 1:
prob_matrix = [prob_matrix]
length = len(prob_... | 10,440 |
def disp_test_res(result_list, base_list, full=True, paired_test=True):
"""
Read in a result list and trace out the disp_curve_list
for different methods
"""
# initialize
constraint = "DP"
err_bar = False
# First calcuate the baseline loss vector
base_res = base_list[0]
dataset ... | 10,441 |
def slsn_constraint(parameters):
"""
Place constraints on the magnetar rotational energy being larger than the total output energy,
and the that nebula phase does not begin till at least a 100 days.
:param parameters: dictionary of parameters
:return: converted_parameters dictionary where the viola... | 10,442 |
def grover_circuit(n,o,iter):
"""Grover Search Algorithm
:param n: Number of qubits (not including ancilla)
:param o: Oracle int to find
:return qc: Qiskit circuit
"""
def apply_hadamard(qc, qubits,a=None) -> None:
"""Apply a H-gate to 'qubits' in qc"""
for q in qubits:
... | 10,443 |
def reference_cluster(envs, in_path):
"""
Return set of all env in_paths referencing or
referenced by given in_path.
>>> cluster = sorted(reference_cluster([
... {'in_path': 'base', 'refs': []},
... {'in_path': 'test', 'refs': ['base']},
... {'in_path': 'local', 'refs': ['test']... | 10,444 |
def scheme_apply(procedure, args, env):
"""Apply Scheme PROCEDURE to argument values ARGS in environment ENV."""
if isinstance(procedure, PrimitiveProcedure):
return apply_primitive(procedure, args, env)
elif isinstance(procedure, UserDefinedProcedure):
new_env = make_call_frame(procedure, a... | 10,445 |
def _validate_num_clusters(num_clusters, initial_centers, num_rows):
"""
Validate the combination of the `num_clusters` and `initial_centers`
parameters in the Kmeans model create function. If the combination is
valid, determine and return the correct number of clusters.
Parameters
----------
... | 10,446 |
def urlinline(filename, mime=None):
"""
Load the file at "filename" and convert it into a data URI with the
given MIME type, or a guessed MIME type if no type is provided.
Base-64 encodes the data.
"""
infile = open(filename, 'rb')
text = infile.read()
infile.close()
enc = b64.stand... | 10,447 |
def parse_matching_criteria(filters, filter_operator):
"""
build the filter criteria, if present
:param filters:field opr value[;]...
:param filter_operator: any|all
:return dictionary of parsed filter settings, True/False for "all"/"any" setting
"""
LOG.debug("%s %s", filters, filter_operat... | 10,448 |
def star_rating(new_rating=None, prev_rating=None):
"""
Generates the query to update the product's star ratings. Inc method is
from https://docs.mongodb.com/manual/reference/operator/update/inc/
"""
add_file = {
1: {"one_star": 1},
2: {"two_stars": 1},
3: {"three_stars": 1},... | 10,449 |
def import_references(directory="./", disable_progress_bar=False):
"""
Import references file.
Args:
directory (str): Directory where the file is located.
scopus_file (str): Name of the file.
Returns:
None
"""
documents = read_all_records(directory)
#
referenc... | 10,450 |
def bsp_split_recursive(
node: tcod.bsp.BSP,
randomizer: Optional[tcod.random.Random],
nb: int,
minHSize: int,
minVSize: int,
maxHRatio: int,
maxVRatio: int,
) -> None:
"""
.. deprecated:: 2.0
Use :any:`BSP.split_recursive` instead.
"""
node.split_recursive(
nb... | 10,451 |
def add_rpaths(env, install_off, set_cgo_ld, is_bin):
"""Add relative rpath entries"""
if GetOption('no_rpath'):
if set_cgo_ld:
env.AppendENVPath("CGO_LDFLAGS", env.subst("$_LIBDIRFLAGS "),
sep=" ")
return
env.AppendUnique(RPATH_FULL=['$PREFIX/lib64'... | 10,452 |
def check_encoder(value: EncoderArg) -> EncoderFactory:
"""Checks value and returns EncoderFactory object.
Returns:
d3rlpy.encoders.EncoderFactory: encoder factory object.
"""
if isinstance(value, EncoderFactory):
return value
if isinstance(value, str):
return create_encode... | 10,453 |
def mypy(ctx):
    """Runs mypy against the codebase"""
    # Delegate to the invoke context using the project's mypy config.
    command = "mypy --config mypy.ini"
    ctx.run(command)
def apply_dep_update(recipe_dir, dep_comparison):
"""Upodate a recipe given a dependency comparison.
Parameters
----------
recipe_dir : str
The path to the recipe dir.
dep_comparison : dict
The dependency comparison.
Returns
-------
update_deps : bool
True if de... | 10,455 |
def allowed_task_name(name: str) -> bool:
"""Determine whether a task, which is a 'non-core-OSCAL activity/directory is allowed.
args:
name: the task name which is assumed may take the form of a relative path for task/subtasks.
Returns:
Whether the task name is allowed or not allowed (inte... | 10,456 |
def get_ignored_classes(uppercase, lowercase, digit):
"""
get tuple of ignored classes based on selected classes
:param
uppercase: whether to keep uppercase classes
:param
lowercase: whether to keep lowercase classes
:param
digit: whether to keep digit classes
:return:
... | 10,457 |
def unscaled_prediction_rmse(model, input_tensor, label_tensor, scalar, loading_length=0, return_loading_error=False,
device=None):
"""
Prediction RMSE.
:param model: model
:param input_tensor: input tensor
:param label_tensor: label tensor
:param scalar: scalar fo... | 10,458 |
def __copyList__(fromList, initialValues = None):
"""
Returns a copy of the provided list. Initial values must either be a single value, or
a list of exactly the same size as the provided list.
"""
if __isListType__(fromList) is False:
raise ValueError('The provided value to copy was not a l... | 10,459 |
def build_or_pattern(patterns, escape=False):
"""Build a or pattern string from a list of possible patterns
"""
or_pattern = []
for pattern in patterns:
if not or_pattern:
or_pattern.append('(?:')
else:
or_pattern.append('|')
or_pattern.append('(?:%s)' % r... | 10,460 |
def concatenate_and_process_data(
data_consent: pd.DataFrame,
data_noconsent: pd.DataFrame,
conversion_column: str = CONVERSION_COLUMN,
drop_columns: Tuple[Any, ...] = DROP_COLUMNS,
non_dummy_columns: Tuple[Any, ...] = NON_DUMMY_COLUMNS
) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""Concatenates conse... | 10,461 |
def plot(nRows=1, nCols=1, figSize=5):
"""
Generate a matplotlib plot and axis handle
Parameters
-----------------
nRows : An int, number of rows for subplotting
nCols : An int, number of columns for subplotting
figSize : Numeric or array (xFigSize, yFigSize). The size of each axis.
"""... | 10,462 |
def make_rate_data(grp, valuevars, query="none == 'All'", data=ob):
"""Filters, Groups, and Calculates Rates
Params:
grp [list]: A list detailing the names of the variables to group by.
valuevars [list]: A list detailing the names of the quantitative
variable summarise and calculate... | 10,463 |
def test_compare_sir_vs_seir(sir_data_wo_policy, seir_data, monkeypatch):
"""Checks if SEIR and SIR return same results if the code enforces
* alpha = gamma
* E = 0
* dI = dE
"""
x_sir, pars_sir = sir_data_wo_policy
x_seir, pars_seir = seir_data
pars_seir["alpha"] = pars_sir["gamma"] ... | 10,464 |
def login(client, username='', password=''):
"""
Log a specific user in.
:param client: Flask client
:param username: The username
:type username: str
:param password: The password
:type password: str
:return: Flask response
"""
user = dict(login=username, password=password)
... | 10,465 |
def test_liststatements():
"""Turn list of dictionaries into list of Statement objects."""
csvrows_list = [
{"shape_id": "@a", "prop_id": "dct:creator", "value_type": "URI"},
{"shape_id": "@a", "prop_id": "dct:subject", "value_type": "URI"},
{"shape_id": "@a", "prop_id": "dct:date", "val... | 10,466 |
def func(x, params):
"""The GNFW radial profile.
Args:
x (:obj:`np.ndarray`): Radial coordinate.
params (:obj:`dict`): Dictionary with keys `alpha`, `beta`, `gamma`, `c500`, and `P0` that defines
the GNFW profile shape.
Returns:
Profile (1d :obj:`np.ndarray`).
... | 10,467 |
def get_double_image_blob(roidb):
"""Builds an input blob from the images in the roidb at the specified
scales.
"""
num_images = len(roidb)
# Sample random scales to use for each image in this batch
scale_inds = np.random.randint(
0, high=len(cfg.TRAIN.SCALES), size=num_images)
proce... | 10,468 |
def test_successful_delete_sbi():
    """Test deleting an SBI successfully"""
    # TODO(review): test body is not implemented; this test currently
    # always passes and provides no coverage.
def _readFromSettings(self, key):
"""Loads the settings object associated with the program and
returns the value at the key."""
COMPANY, APPNAME, _ = SELMAGUISettings.getInfo()
COMPANY = COMPANY.split()[0]
APPNAME = APPNAME.split()[0]
settings = QtCore.QSettings(COM... | 10,470 |
def run_overlay_resources_score_motifs(normal_expression_per_tissue_origin_per_TF,
matching_cell_name_representative_dict, motifTFName_TFNames_matches_dict,
cells_assays_dict, cell_tfs, tf_cells, assay_cells_datatypes, header):
"""pairs ... | 10,471 |
def compare(file1, file2):
"""子主函数"""
try:
f1 = 'C:\\Users\\seamus\\Downloads\\configure_data\\' + file1
f2 = 'C:\\Users\\seamus\\Downloads\\configure_data\\' + file2
except Exception as e:
print("Error: " + str(e))
print("Usage : python compareFile.py filename1 filename2")
sys.exit()
if f1 == "" or f2 ... | 10,472 |
def history_directory(repo_loc: str) -> str:
"""Retrieve the directory containing job logs for the specified repository
Parameters
----------
repo_loc : str
FAIR-CLI repository path
Returns
-------
str
location of the job logs directory
"""
return os.path.join(
... | 10,473 |
def test_calc_node_coords_1():
"""
Attempt to run on edges which are out of order.
This should raise an exception.
"""
test_input_tp_as_text = test_1_lines['000000F'] + """\n000000F 000062240:B 000083779:B 000083779 862 0 30696 99.79"""
all_sl = [line.strip().split() for line in test_input_tp_a... | 10,474 |
def validate_input_path(path: str) -> None:
"""
Validate input file/directory path argument, raises a ValueError if the path
is not valid .sql file or directory.
:param path: input path argument specified by the user
"""
if not os.path.isdir(path) and not (os.path.isfile(path)
... | 10,475 |
def _h5_overwrite_array_slice(model, h5_key_pair, slice_tuple, array_slice):
    """Overwrites (updates) a slice of an hdf5 array.

    h5_key_pair is (file key, dataset key); the file is opened in
    append mode so the existing dataset can be modified in place.
    """
    file_key, dataset_key = h5_key_pair
    h5_root = _h5_access(model, file_key, mode = 'a')
    h5_root[dataset_key][slice_tuple] = array_slice
def is_statu(search_data):
"""
判断是否有参数,且为正常还是停用
:param search_data:
:return:
"""
logging.info('is_statu')
if search_data:
if search_data == '正常':
return '1'
elif search_data == '停用':
return '0'
else:
return search_data
else:
... | 10,477 |
def get_mysql_exception(errno, msg, sqlstate=None):
"""Get the exception matching the MySQL error
This function will return an exception based on the SQLState. The given
message will be passed on in the returned exception.
The exception returned can be customized using the
mysql.connector.cust... | 10,478 |
def teardown_module(module):
    """Teardown any state that was previously setup with a setup_module
    method.

    Best-effort removal of the temporary zip archives created by tests
    in this module.

    Args:
        module: the module object being torn down (unused).
    """
    import time
    # Temp files may still be held open briefly by the code under test;
    # give the OS a moment to release them before removal.
    time.sleep(1.0)
    for name in ["1.zip", "2.zip", "3.zip"]:
        try:
            os.remove(name)
        except OSError:
            # Missing or locked file: cleanup is best-effort. Narrowed
            # from a bare `except:`, which also swallowed unrelated
            # errors (e.g. NameError) and KeyboardInterrupt.
            pass
def clear_pkt_loss():
    """Delete pkt_loss.yaml from the current working directory if present.

    :return: the absolute path that was checked (and possibly removed)
    """
    target = os.path.join(os.getcwd(), 'pkt_loss.yaml')
    if os.path.isfile(target):
        os.remove(target)
    return target
def get_image_resize_transform_steps(config, dataset) -> List:
"""
Resizes the image to a slightly larger square.
"""
assert dataset.original_resolution is not None
assert config.resize_scale is not None
scaled_resolution = tuple(
int(res * config.resize_scale) for res in dataset.... | 10,481 |
def get_page_title(page_src, meta_data):
"""Returns the title of the page. The title in the meta data section
will take precedence over the H1 markdown title if both are provided."""
return (
meta_data['title']
if 'title' in meta_data and isinstance(meta_data['title'], str)
else get_... | 10,482 |
def get_args():
"""引数解析
Returns:
argparse.Namespace: 引数情報
"""
parser = argparse.ArgumentParser(
prog="app.py",
usage="realtime or audio file",
description="detect music change point.",
add_help=True
)
parser.add_argument(
"--cfg", type=str,
... | 10,483 |
def make_3dplot(fname_inp, fname_fig, clim=[None,None], vnames=[], data_processor=None, verbose='debug', **kws):
"""
make 3D plot with a radial and longitudinal cuts
"""
logger.setLevel(getattr(logging, verbose.upper()))
assert len(vnames)>0, ' [-] We need names in vnames!\n'
# we'll obtain:
... | 10,484 |
def saveReplayBuffer():
    """
    Flush and save the contents of the Replay Buffer to disk. This is
    basically the same as triggering the "Save Replay Buffer" hotkey.
    Will return an `error` if the Replay Buffer is not active.
    """
    # Request carries no parameters; only the request name matters.
    request_name = "SaveReplayBuffer"
    return __createJSON(request_name, {})
def view_or_basicauth(view, request, test_func, realm = "", *args, **kwargs):
"""
This is a helper function used by both 'logged_in_or_basicauth' and
'has_perm_or_basicauth' that does the nitty of determining if they
are already logged in or if they have provided proper http-authorization
and return... | 10,486 |
def evaluate_srl_1step(find_preds_automatically=False, gold_file=None):
"""
Evaluates the network on the SRL task performed with one step for
id + class.
"""
md = Metadata.load_from_file('srl')
nn = taggers.load_network(md)
r = taggers.create_reader(md, gold_file=gold_file)
itd = r.... | 10,487 |
def ring_bells():
"""Rings the school bells in a pattern for the given schedule/time."""
# Need to get the pattern for this time slot and apply it.
curTime = time.strftime("%H:%M")
if curTime not in jsonConfig["schedules"][curSchedule]:
logging.error("Couldn't find time record for time " + curTi... | 10,488 |
def _subtract_background_one_line(data_line, e_off, e_lin, e_quad, width):
"""
Subtract background from spectra in a single line of the image
Parameters
----------
data_line : ndarray
spectra for one line of an image, size NxM, N-the number of
pixels in the line, M - the number of ... | 10,489 |
def extract_module(fo, modname):
    """Extract a single module's test plan documents.

    Args:
        fo: writable file-like object that receives the documentation.
        modname: importable name of the module to document.
    """
    import importlib
    # importlib.import_module resolves dotted names to the named
    # submodule, whereas __import__('a.b') returns the top-level
    # package 'a' -- the wrong module object for dotted modnames.
    mod = importlib.import_module(modname)
    mod_doc(fo, mod)
def clean_principals_output(sql_result, username, shell=False):
    """
    Transform sql principals into readable one.

    Falls back to the username when the SQL result is empty. With
    shell=True a plain string is returned; otherwise a list.
    """
    # Empty result: the caller's own username is the only principal.
    if not sql_result:
        return username if shell else [username]
    if shell:
        return sql_result
    return sql_result.split(',')
def update_globals(column_dict: Dict[str, Tuple[Hashable, Hashable]]) -> None:
"""Update the column names stored in the global variable ``_GLOBVAR``.
Parameters
----------
column_dict: :class:`dict` [:class:`str`, :class:`tuple` [:class:`Hashable`, :class:`Hashable`]]
A dictionary which maps co... | 10,492 |
def table(ctx):
    """CRM configuration for ACL table resource"""
    # Record the selected resource type on the shared CRM config object.
    crm_config = ctx.obj["crm"]
    crm_config.res_type = 'acl_table'
def test_versions(gen3_index):
"""
Test creation of a record and a new version of it
index.py functions tested:
create_record
create_new_version
get_versions
get_latest_version
"""
# put a new record in the index
newrec = gen3_index.create_record(
acl=[... | 10,494 |
def matchesType(value, expected):
"""
Returns boolean for whether the given value matches the given type.
Supports all basic JSON supported value types:
primitive, integer/int, float, number/num, string/str, boolean/bool, dict/map, array/list, ...
"""
result = type(value)
expected = expecte... | 10,495 |
def __find_surplus_locks_and_remove_them(datasetfiles, locks, replicas, source_replicas, rseselector, rule, source_rses, session=None, logger=logging.log):
"""
Find surplocks locks for a rule and delete them.
:param datasetfiles: Dict holding all datasets and files.
:param locks: Dic... | 10,496 |
def csv_logging(record):
"""generate output in csv format"""
csv_record = ('{ts},{si},{di},{sp},{dp},{t},"{p}",{h},{v},"{ha}",'
'"{k}","{e}","{m}","{c}"')
if 'hassh' in record:
hasshType = 'client'
kexAlgs = record['ckex']
encAlgs = record['ceacts']
macAlgs ... | 10,497 |
def harvest_outfile_pass(outtext):
"""Function to read NWChem output file *outtext* and parse important
quantum chemical information from it in
"""
psivar = PreservingDict()
psivar_coord = None
psivar_grad = None
version = ""
module = None
error = "" # TODO (wardlt): The error stri... | 10,498 |
def create_denoising_dataset(epi_path,log_path,acqtimes_path,rot_dir=-1, interactive=True, img_dir=None, slice_indices=None, inner_mask_level=.004):
"""Generates masks and timeseries for analysis.
Parameters
----------
epi_path : str
Path to the phantom data.
log_path : str
Path to ... | 10,499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.