content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def data_check(data):
    """Return True if *data*, coerced to float, lies in the closed interval [0, 1].

    Raises ValueError (or TypeError) if *data* cannot be converted to float.
    """
    value = float(data)
    return 0.0 <= value <= 1.0
import re
def tokenize(text):
"""
Tokenization function to process text data
Args:
text: String. disaster message.
Returns:
clean_tokens: list. token list from text message.
"""
url_regex = 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
# g... | d5ee0929c0b6fad243b87c2b7e82270859b9b3f3 | 1,901 |
def get_symbol_historical(symbol_name):
    """Return the available historical data for a symbol.

    The underlying frame is serialized with ``orient="records"``, i.e.
    the result is a list with one dict per row.
    """
    # Fetch the raw data, then serialize it row-by-row for the response.
    symbol_data = get_symbol_data(symbol_name)
    return symbol_data.to_dict(orient="records")
def guesses(word):
    """Return a sorted list of all first- and second-order guesses for *word*."""
    # sorted() materializes and orders the candidates in one step.
    return sorted(known(*first_order_variants(word)))
def em(X, sf, inits, K, L, n_iter=100, n_inner_iter=50, tol=1e-5, zero_inflated=True):
"""
run EM algorithm on the given init centers
return the clustering labels with the highest log likelihood
"""
# add prepare reduced data here
print("start em algorithm")
res = _em(X, sf, inits, K, L, ... | 7a53c14caf56958fed80241bf347071b84a62280 | 1,904 |
def is_disaggregate(data, raw_fuel_sectors_enduses):
"""TODO: Disaggregate fuel for sector and enduses with floor
area and GVA for sectors and enduses (IMPROVE)
#TODO: DISAGGREGATE WITH OTHER DATA
"""
is_fueldata_disagg = {}
national_floorarea_sector = 0
for region_name in data['lu_reg']:
... | 64111bb23099526aae7e2bf7fa5aefcd6225fd7c | 1,905 |
def update_logger(evo_logger, x, fitness, memory, top_k, verbose=False):
""" Helper function to keep track of top solutions. """
# Check if there are solutions better than current archive
vals = jnp.hstack([evo_logger["top_values"], fitness])
params = jnp.vstack([evo_logger["top_params"], x])
concat... | 8efd1bbc4f0c1cde17e2ef425ae82cf3f5967df3 | 1,906 |
def ae(nb_features,
input_shape,
nb_levels,
conv_size,
nb_labels,
enc_size,
name='ae',
prefix=None,
feat_mult=1,
pool_size=2,
padding='same',
activation='elu',
use_residuals=False,
nb_conv_per_level=1,
batch_norm=None,
... | ab5bbed13e5636ab506612776920eaffa67b8b3e | 1,907 |
import os
import shlex
def read_authorized_keys(username=None):
"""Read public keys from specified user's authorized_keys file.
args:
username (str): username.
returns:
list: Authorised keys for the specified user.
"""
authorized_keys_path = '{0}/.ssh/authorized_keys'.format(os.p... | 700c61571a2526d769492ade4fea4b395ff7d6ae | 1,908 |
import os
def get_fprime_version():
""" Gets the fprime version using setuptools_scm """
# First try to read the SCM version
try:
return get_version(root=os.sep.join([".."] * ROOT_PARENT_COUNT), relative_to=__file__)
# Fallback to a specified version when SCM is unavailable
except LookupE... | 940ab7ffbf57d1f415b03b61b9169c1062fdb5c1 | 1,909 |
def parser_config(p):
    """JLS file info: register this command's arguments on parser *p*.

    Returns the command handler (``on_cmd``, defined elsewhere in the module).
    """
    p.add_argument(
        '--verbose', '-v',
        action='store_true',
        help='Display verbose information.',
    )
    p.add_argument('filename', help='JLS filename')
    return on_cmd
def decentralized_training_strategy(communication_rounds, epoch_samples, batch_size, total_epochs):
"""
Split one epoch into r rounds and perform model aggregation
:param communication_rounds: the communication rounds in training process
:param epoch_samples: the samples for each epoch
:param batch_... | 3a743208af50d7c7865d5d5f86a4f58b0ba98a4d | 1,911 |
def create_config_file_lines():
"""Wrapper for creating the initial config file content as lines."""
lines = [
"[default]\n",
"config_folder = ~/.zettelkasten.d\n",
"\n",
"def_author = Ammon, Mathias\n",
"def_title = Config Parsed Test Title\n",
"def_location_spec... | d0d1057c3f450636279a8df9d4a39977f1eeef42 | 1,912 |
def p_planes_tangent_to_cylinder(base_point, line_vect, ref_point, dist, ):
"""find tangent planes of a cylinder passing through a given point ()
.. image:: ../images/plane_tangent_to_one_cylinder.png
:scale: 80 %
:align: center
Parameters
----------
base_point : point
poin... | e8928e4314cadede97bef977c0348e32832157ad | 1,913 |
def BOPTools_AlgoTools3D_OrientEdgeOnFace(*args):
"""
* Get the edge <aER> from the face <aF> that is the same as the edge <aE>
:param aE:
:type aE: TopoDS_Edge &
:param aF:
:type aF: TopoDS_Face &
:param aER:
:type aER: TopoDS_Edge &
:rtype: void
"""
return _BOPTools.BOPTools_... | 31da8b90e4ad5838b94a0481d937104845de735c | 1,914 |
def create_store_from_creds(access_key, secret_key, region, **kwargs):
"""
Creates a parameter store object from the provided credentials.
Arguments:
access_key {string} -- The access key for your AWS account
secret_key {string} -- The secret key for you AWS account
region {stri... | 8e0ec2a6579a6013d36b6933ee922a406730ee35 | 1,915 |
import abc
def are_objects_equal(object1, object2):
"""
compare two (collections of) arrays or other objects for equality. Ignores nan.
"""
if isinstance(object1, abc.Sequence):
items = zip(object1, object2)
elif isinstance(object1, dict):
items = [(value, object2[key]) for key, va... | 94b4b9a9f42bc8b1dd44d5e010b422082452f649 | 1,916 |
def get_recipes_from_dict(input_dict: dict) -> dict:
"""Get recipes from dict
Attributes:
input_dict (dict): ISO_639_1 language code
Returns:
recipes (dict): collection of recipes for input language
"""
if not isinstance(input_dict, dict):
raise TypeError("Input is not typ... | e710d9629d10897d4aae7bf3d5de5dbbe18196c5 | 1,917 |
def tasks_from_wdl(wdl):
    """Return a dictionary of the tasks contained in a .wdl file.

    The values are the task definitions found within the wdl source.
    """
    # Delegate to the generic scope extractor with the "task" scope kind.
    return scopes_from_wdl("task", wdl)
import traceback
import sys
def wrap_parse(content, args):
"""
Wraps a call to `parse` in a try/except block so that one can use a Pool
and still get decent error messages.
Arguments
---------
content: segments are strings
args: a namespace, see `parse`
Returns
-------
parse ... | 2e0a97c363be371fadbbabb22d83e1f4368205ad | 1,919 |
async def get_category_item_route(category_id: CategoryEnum, item_id: ObjectID,
db: AsyncIOMotorClient = Depends(get_database)) -> ItemInResponse:
"""Get the details about a particular item"""
_res = await db[category_id]["data"].find_one({"_id": item_id})
if _res:
... | 4feed87e3948994f8066268820355d9fdfe4999d | 1,920 |
def weighted_SVD(matrix, error=None, full_matrices=False):
"""
Finds the most important modes of the given matrix given the weightings
given by the error.
matrix a horizontal rectangular matrix
error weighting applied to the dimension corresponding to the rows
"""
if type(error) is type... | 5ca0f54af765f0694fb572ee3b82f4d59642bb06 | 1,921 |
def ingredients():
    """Route that lists all ingredients currently in the database.

    The optional ``q`` query parameter is forwarded to the database
    subset lookup (presumably a search filter -- confirm in the db layer).
    """
    subset_query = request.args.get("q")
    found = db.get_ingredient_subset_from_db(subset_query)
    return jsonify(found)
def read_h5_particles(particles_file, refpart, real_particles, bucket_length, comm, verbose):
"""Read an array of particles from an HDF-5 file"""
four_momentum = refpart.get_four_momentum()
pmass = four_momentum.get_mass()
E_0 = four_momentum.get_total_energy()
p0c = four_momentum.get_momentum()
... | caaeb89920b3cc9e0b263c9b1fea5fc1615ad8b3 | 1,923 |
import logging
def readAndMapFile(path):
"""
Main file breaker - this takes a given file and breaks it into arbitrary
fragments, returning and array of fragments. For simplicity, this is breaking on
newline characters to start with. May have to be altered to work with puncuation
and/or special ... | 4a542d1a08fcd88a1660de360c15d87949eddf11 | 1,924 |
def fetch_git_logs(repo, from_date, to_date, args): # pragma: no cover
"""Fetch all logs from Gitiles for the given date range.
Gitiles does not natively support time ranges, so we just fetch
everything until the range is covered. Assume that logs are ordered
in reverse chronological order.
"""
cursor = '... | 1164b373e9b8f7186165712f8ac9e5e3d1a1f10f | 1,925 |
import torch
def _gen_bfp_op(op, name, bfp_args):
"""
Do the 'sandwich'
With an original op:
out = op(x, y)
grad_x, grad_y = op_grad(grad_out)
To the following:
x_, y_ = input_op(x, y)
Where input_op(x, y) -> bfp(x), bfp(y)
and input_op_grad(grad_x, grad_y) -> bfp(grad_x), bfp(gr... | d430bd9d090d0a47fa4d6a8c173c77b08e2fdb66 | 1,926 |
def angleaxis_to_rotation_matrix(aa):
"""Converts the 3 element angle axis representation to a 3x3 rotation matrix
aa: numpy.ndarray with 1 dimension and 3 elements
Returns a 3x3 numpy.ndarray
"""
angle = np.sqrt(aa.dot(aa))
if angle > 1e-6:
c = np.cos(angle);
s = np.sin(a... | 57d849f137684824aa23d393802dc247df987b59 | 1,927 |
def sendOrderFAK(self, orderType, price, volume, symbol, exchange, stop=False):
"""发送委托"""
if self.trading:
# 如果stop为True,则意味着发本地停止单
req = {}
req['sid'] = self.sid
if orderType == CTAORDER_BUY:
req['direction'] = '0'
req['offset'] = '0'
elif orderT... | 5b72ab3cdfa0b4412df2861d1e23a4a55f1d7206 | 1,928 |
import itertools
def unique(lst):
"""
:param lst: a list of lists
:return: a unique list of items appearing in those lists
"""
indices = sorted(list(range(len(lst))), key=lst.__getitem__)
indices = set(next(it) for k, it in
itertools.groupby(indices, key=lst.__getitem__))
... | 0848d693681ff0f8bdbc0d0436b3d4450eee781e | 1,929 |
def max_frequency(sig, FS):
"""Compute max frequency along the specified axes.
Parameters
----------
sig: ndarray
input from which max frequency is computed.
FS: int
sampling frequency
Returns
-------
f_max: int
0.95 of max_frequency using cumsum.
"""
f, ... | 19321fb47d47b99138e1d1551f3728df4c2b7370 | 1,930 |
def split(text):
"""Turns the mobypron.unc file into a dictionary"""
map_word_moby = {}
try:
lines = text.split("\n")
for line in lines:
(word, moby) = line.split(" ", 1)
map_word_moby[word] = moby
except IOError as error:
print(f"Failed due to IOError: {... | ba051724f0399e918949c3e8b7fb010e2d87c9f9 | 1,931 |
def report(key_name=None, priority=-1, **formatters):
""" Use this decorator to indicate what returns to include in the report and how to format it """
def tag_with_report_meta_data(cls):
# guard: prevent bad coding by catching bad return key
if key_name and key_name not in cls.return_keys:
... | 3830135de40bdc2a25bd3c6b6cecc194c6dbebac | 1,932 |
import scipy
def calc_momentum_def(x_loc, X, Y, U):
""" calc_momentum_def() : Calculates the integral momentum deficit of scalar field U stored at \
locations X,Y on a vertical line that runs nearest to x_loc. """
U_line, x_line, x_idx_line = get_line_quantity(x_loc, X, Y, U)
y_line = Y[:,x_i... | 7173450ebd779c07a80cef2deb37954ddb7509be | 1,933 |
def display_unit_title(unit, app_context):
    """Prepare an internationalized display for the unit title."""
    # NOTE(review): this result is never used -- candidate for removal if
    # get_environ() has no side effects; confirm before deleting.
    course_properties = app_context.get_environ()
    fmt = get_unit_title_template(app_context)
    return fmt % {'index': unit.index, 'title': unit.title}
def load_user(user_id):
    """Load the user object from the user ID stored in the session."""
    matches = User.objects(pk=user_id)
    return matches.first()
def get_complex_replay_list():
"""
For full replays that have crashed or failed to be converted
:return:
"""
return [
'https://cdn.discordapp.com/attachments/493849514680254468/496153554977816576/BOTS_JOINING_AND_LEAVING.replay',
'https://cdn.discordapp.com/attachments/49384951468025... | ef5a75a848289ad9c129c2b73a6d6845dcd07cfe | 1,936 |
import json
def parse_registry():
""" Parses the provided registry.dat file and returns a dictionary of chunk
file names and hashes. (The registry file is just a json dictionary containing
a list of file names and hashes.) """
registry = request.values.get("registry", None)
if registry is None:
... | 71d4cd0f2b9fb33b92861feb9ea882fc32ec7234 | 1,937 |
import math
def get_cosine_with_hard_restarts_schedule_with_warmup(optim: Optimizer,
num_warmup_step: float,
num_training_step: int,
num_cycles: float = ... | 5327cb688885c8ecc271156364a06bffedd97775 | 1,938 |
import typing
def home():
"""
Render Homepage
--------------------------------------------------------------
This site should be cached, because it is the main entry point for many users.
"""
bestseller: typing.List[Device] = get_bestsellers()
specialist_manufacturers = Manufacturer.query.... | ca452264e8a10af83e0cc7b5df592a9f618085ad | 1,939 |
def reject_call():
    """Build a TwiML response that rejects the caller and hangs up."""
    response = twilio.twiml.Response()
    response.say(
        "I'm sorry, Mr. Baker doesn't want to talk to you. Goodbye scum.",
        voice='woman',
        language='en-GB',
    )
    response.hangup()
    return str(response)
def table_prep(data, columns=''):
"""
Data processor for table() function.
You can call it separately as well and in
return get a non-prettyfied summary table.
Unless columns are defined, the three first
columns are chosen by default.
SYNTAX EXAMPLE:
df['quality_score'... | a9d3d75d2ac32ddf5ae4d5a17a10974b61c139ee | 1,941 |
def lerp(a, b, t):
    """Linearly interpolate from *a* to *b* as *t* sweeps from 0 to 1."""
    weight_a = 1 - t
    weight_b = t
    return weight_a * a + weight_b * b
def convert_to_legacy_v3(
game_tick_packet: game_data_struct.GameTickPacket,
field_info_packet: game_data_struct.FieldInfoPacket = None):
"""
Returns a legacy packet from v3
:param game_tick_packet a game tick packet in the v4 struct format.
:param field_info_packet a field info packet i... | 3e00e165233806957a010871c9218b1c02950063 | 1,943 |
import logging
def _load_audio(audio_path, sample_rate):
"""Load audio file."""
global counter
global label_names
global start
global end
logging.info("Loading '%s'.", audio_path)
try:
lbl1=Alphabet[audio_path[-6]]
lbl2 = Alphabet[audio_path[-5]]
except:
lbl1=1 + counter
lbl2=2 + c... | 5e8112c79164c800965f137c83ceb720aab17bdf | 1,944 |
def generate_annotation_dict(annotation_file):
""" Creates a dictionary where the key is a file name
and the value is a list containing the
- start time
- end time
- bird class.
for each annotation in that file.
"""
annotation_dict = dict()
for line i... | f40f210075e65f3dbe68bb8a594deb060a23ad8b | 1,945 |
def ishom(T, check=False, tol=100):
"""
Test if matrix belongs to SE(3)
:param T: SE(3) matrix to test
:type T: numpy(4,4)
:param check: check validity of rotation submatrix
:type check: bool
:return: whether matrix is an SE(3) homogeneous transformation matrix
:rtype: bool
- ``ish... | b4a0467d22940889e3071bf07d4a093d567409f3 | 1,946 |
def _get_stp_data(step_order=STEP_ORDER, n=N_PER_STEP):
"""Returns np.array of step-type enums data for sample data.
Parameters
----------
step_order : list of (int, char)
List of (Cycle number, step type code) for steps in sample procedure.
n : int
Number of datapoints per step.
... | d96a2604ac67e1a84ead39e0d2d39a5c6183a5cd | 1,947 |
def actor_discrete_loss(actions, advantages, logits):
"""
Adapted from: http://inoryy.com/post/tensorflow2-deep-reinforcement-learning/
"""
# sparse categorical CE loss obj that supports sample_weight arg on call()
# from_logits argument ensures transformation into normalized probabilities
weigh... | a1a4cf0967d432655cc0788ad2c20bb0ca861d4f | 1,948 |
from typing import Union
from typing import List
def fuse_stride_arrays(dims: Union[List[int], np.ndarray],
strides: Union[List[int], np.ndarray]) -> np.ndarray:
"""
Compute linear positions of tensor elements
of a tensor with dimensions `dims` according to `strides`.
Args:
dims: ... | 06185cb0bcfccd30e7b006fa8fe4e28a6f5ae7f3 | 1,949 |
def extract_jasmine_summary(line):
"""
Example SUCCESS karma summary line:
PhantomJS 2.1.1 (Linux 0.0.0): Executed 1 of 1 SUCCESS (0.205 secs / 0.001 secs)
Exmaple FAIL karma summary line:
PhantomJS 2.1.1 (Linux 0.0.0): Executed 1 of 1 (1 FAILED) ERROR (0.21 secs / 0.001 secs)
"""
# get tota... | f795ff015555cc3a2bd2d27527ae505a6dde9231 | 1,950 |
import argparse
import inspect
import re
import os
def main( argv ):
"""
Script execution entry point
@param argv Arguments passed to the script
@return Exit code (0 = success)
"""
#-------------------------------------------------------------------------
# BEGIN: Per-... | 3f23b4f4ec29ed563c08432b30bf963739d4f789 | 1,951 |
from xmodule.modulestore.store_utilities import DETACHED_XBLOCK_TYPES
def serialize_item(item):
"""
Args:
item: an XBlock
Returns:
fields: a dictionary of an XBlock's field names and values
block_type: the name of the XBlock's type (i.e. 'course'
or 'problem')
"""
... | 426e5e83644ca2f1a81491e7e0a65a67cca26f15 | 1,952 |
def gen_outfile_name(args):
    """Generate a name for the output file based on the input args.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed arguments; ``args.outfile`` is used as the base name and
        ``gen_identifier(args)`` supplies the suffix.
    """
    suffix = gen_identifier(args)
    return args.outfile + suffix
def check_vg_tags(game_id):
"""Returns a user's tags."""
if game_id:
user_id = session.get('user_id')
user_query = VgTag.query.join(Tag).filter(Tag.user_id == user_id) # Only display user's tags for a specific game.
vg_tags = user_query.filter(VgTag.game_id == game_id).all()
... | 1eed3e9a58a21a79ae5502a67bde0c409af71785 | 1,954 |
def load_fits(path):
"""
load the fits file
Parameters
----------
path: string, location of the fits file
Output
------
data: numpy array, of stokes images in (row, col, wv, pol)
header: hdul header object, header of the fits file
"""
hdul_tmp = fits.open(f'{path}')
data = np.asarray(... | f0040e9ef3c8b2e7e4136f0ef7a7a2f9370a3653 | 1,955 |
def get_image_path(cfg,
metadata,
prefix='diag',
suffix='image',
metadata_id_list='default',):
"""
Produce a path to the final location of the image.
The cfg is the opened global config,
metadata is the metadata dictionairy (fo... | 0c725311db7b3290923f6206cb2bb4d382644e12 | 1,956 |
def ProjectNameToBinding(project_name, tag_value, location=None):
"""Returns the binding name given a project name and tag value.
Requires binding list permission.
Args:
project_name: project name provided, fully qualified resource name
tag_value: tag value to match the binding name to
location: reg... | 00966f8b74378b905fe5b3c4e5a6716a5d4f71bf | 1,957 |
def degrees_of_freedom(s1, s2, n1, n2):
"""
Compute the number of degrees of freedom using the Satterhwaite Formula
@param s1 The unbiased sample variance of the first sample
@param s2 The unbiased sample variance of the second sample
@param n1 Thu number of observations in the first sample
@pa... | 5f076e33584c61dca4410b7ed47feb0043ec97cb | 1,958 |
import os
def get_requires_file(dist):
    """Get the path to the egg-info requires.txt file for a given dist.

    Parameters:
        dist: a distribution object exposing ``location`` (the install
            directory) and ``project_name`` attributes, e.g. a
            ``pkg_resources.Distribution``.

    Returns:
        str: ``<location>/<project_name>.egg-info/requires.txt``.
    """
    # os.path.join is variadic -- no need for the nested double call.
    return os.path.join(
        dist.location, dist.project_name + ".egg-info", "requires.txt"
    )
def get_range_to_list(range_str):
"""
Takes a range string (e.g. 123-125) and return the list
"""
start = int(range_str.split('-')[0])
end = int(range_str.split('-')[1])
if start > end:
print("Your range string is wrong, the start is larger than the end!", range_str)
return range(sta... | a88d9780ac2eba1d85ae70c1861f6a3c74991e5c | 1,960 |
import base64
def get_saml_assertion(server, session, access_token, id_token=None):
"""
Exchange access token to saml token to connect to VC
Sample can be found at
https://github.com/vmware/vsphere-automation-sdk-python/blob/master/samples/vsphere/oauth/exchange_access_id_token_for_saml.py
"""
... | 174400720340fb831d6a62728b48555db7349b95 | 1,961 |
import sys
def insert_cluster_metadata(clconn, name, desc, cli, verbose=False):
"""
Insert the cluster metadata information in the SQL table and return its rowid.
This is the information that describes how the clusters were made.
:param clconn: the database connection
:param name: the name of the ... | bc7462f3911225a935183747263656d02ee761fc | 1,962 |
import html
def display_value(id, value):
"""
Display a value in a selector-like style.
Parameters
----------
id: int
Id of the value to be displayed
"""
return html.div(
{
"class": "py-3 pl-3 w-full border-[1px] sm:w-[48%] md:w-[121px] bg-nav rounded-[3px] md:... | aeb3ceeeb8a2048beb8df7f5d3e6027d90df4739 | 1,963 |
def helmholtz_adjoint_double_layer_regular(
test_point, trial_points, test_normal, trial_normals, kernel_parameters
):
"""Helmholtz adjoint double layer for regular kernels."""
wavenumber_real = kernel_parameters[0]
wavenumber_imag = kernel_parameters[1]
npoints = trial_points.shape[1]
dtype = t... | 6b640e2b7b02e124d893452b8437bfdf6f4af1ec | 1,964 |
import sys
import signal
def compute_vad(wav_rspecifier, feats_wspecifier, opts):
"""This function computes the vad based on ltsv features.
The output is written in the file denoted by feats_wspecifier,
and if the test_plot flaf is set, it produces a plot.
Args:
wav_rspecifier: An ark or scp file as in... | 7f652befce6b02c89f0e44d20dd08ee12b1e2783 | 1,965 |
def crt(s):
    """
    Solve the system given by x == v (mod k),
    where (k, v) goes over all key-value pairs of the dictionary s.
    """
    # Incremental (Garner-style) CRT: keep a solution x modulo n, where n
    # is the product of the moduli processed so far, and fold in one
    # congruence per iteration.
    x, n = 0, 1
    for q, r in s.items():
        # Lift x so it also satisfies x == r (mod q).  `inverse(n, q)` is
        # presumably the modular inverse of n mod q (defined elsewhere),
        # which requires gcd(n, q) == 1, i.e. pairwise-coprime moduli --
        # confirm against the helper's definition.
        x += n * ((r-x) * inverse(n, q) % q)
        n *= q
    return x
def create_new_tf_session(**kwargs):
    """Return the current default TF session, creating one (and entering
    its context) when none is active.  kwargs are forwarded to make_session.
    """
    session = tf.get_default_session()
    if session is None:
        session = make_session(**kwargs)
        session.__enter__()  # installs it as the default session
    assert tf.get_default_session()
    return session
import typing
import re
def MaybeGetHexShaOfLastExportedCommit(
repo: git.Repo, head_ref: str = "HEAD") -> typing.List[str]:
"""The the SHA1 of the most recently exported commit.
Args:
repo: The repo to iterate over.
head_ref: The starting point for iteration, e.g. the commit closest to
head.
... | 1d6afe688567ffe245e9aabe753c90e6baf22bfe | 1,968 |
def get_inchi(ID):
    """Return the InChI string of the compound with the given UNIQUE-ID.

    Looks the ID up in the module-level ``df_cpd`` table's INCHI column.
    """
    return df_cpd['INCHI'][ID]
import os
def run_samtools_faidx(job, ref_id):
"""
Use Samtools to create reference index file
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str ref_id: FileStoreID for the reference genome
:return: FileStoreID for reference index
:rtype: str
"""
job.fileStore... | 6736d314c6fb72fcf019a41477e0b0bc77dd94bc | 1,970 |
from typing import Optional
def pad_to_multiple(array: Array,
factor: int,
axis: int,
mode: Optional[str] = 'constant',
constant_values=0) -> Array:
"""Pads `array` on a given `axis` to be a multiple of `factor`.
Padding will be conc... | 5164e124dc270a47ef8f8b1512cdefe796904791 | 1,971 |
import json
def easy2dict(config: easydict.EasyDict):
    """Convert an EasyDict into a plain ``dict``, recursively.

    :param config: EasyDict parameters.
    """
    # Round-tripping through JSON converts *every* nesting level; a plain
    # ``dict(config)`` would only convert the outermost layer.
    return json.loads(json.dumps(config))
import math
def define_request(
dataset,
query=None,
crs="epsg:4326",
bounds=None,
bounds_crs="EPSG:3005",
sortby=None,
pagesize=10000,
):
"""Define the getfeature request parameters required to download a dataset
References:
- http://www.opengeospatial.org/standards/wfs
-... | 215b39a606bfa7fc6736e8b2f61bf9c298412b36 | 1,973 |
from typing import List
from typing import Tuple
import torch
def get_bert_input(
examples: List[tuple],
) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
"""Convert input list to torch tensor.
Args:
examples: (input_id_list, )
Returns:
attention_mask, input_ids_tensor, token_typ... | 954d0990d5cd5f28d588c472f7d7d48ecc4b3eb2 | 1,974 |
import io
import traceback
def _format_exception(e: BaseException):
"""
Shamelessly stolen from stdlib's logging module.
"""
with io.StringIO() as sio:
traceback.print_exception(e.__class__, e, e.__traceback__, None, sio)
return sio.getvalue().strip() | d80f60634a9862ca282b1c7ccf63ae8e945ffdc9 | 1,975 |
import json
def batch_deploy(blueprint_id,
parent_deployments,
group_id=None,
new_deployment_ids=None,
inputs=None,
labels=None,
**_):
"""
Create deployments for a batch from a single blueprint.
:param bl... | 8128e39c94bfc15a5b75d3a88274720b52d8d900 | 1,976 |
import json
def compute_task_def(build, settings, fake_build):
"""Returns a swarming task definition for the |build|.
Args:
build (model.Build): the build to generate the task definition for.
build.proto.infra and build.proto.input.properties must be initialized.
settings (service_config_pb2.Setti... | 7071960148ed391b42a4b7ad1e4ed4e6d0c10713 | 1,977 |
import traceback
from bs4 import BeautifulSoup
def parse_markdown(page, target=None, pages=None, categories=[], mode="html",
current_time="", bypass_errors=False):
"""Takes a page object (must contain "md" attribute) and returns parsed
and filtered HTML."""
target = get_target(target)
... | 4e079c4c9d5f9ac9891f515ebc806877f3568cc8 | 1,978 |
def draw_bs_pairs(x, y, func, size=1):
"""Perform pairs bootstrap for replicates."""
# Set up array of indices to sample from: inds
inds = np.arange(len(x))
# Initialize replicates
bs_replicates = np.empty(size)
# Generate replicates
for i in range(size):
bs_inds = np.random.choice... | f0b05241f567570dd96ed97340d5075b8ccb5a7b | 1,979 |
def has_hole(feature):
"""
Detects the number of holes in a shapely polygon or multipolygon.
Parameters
----------
feature : shapely Polygon or Multipolygon
polygon to be analyzed for holes
Returns
-------
int
number of holes
"""
if feature.geom_typ... | e854d7a4902e66ec95479816662a145e184ee8af | 1,980 |
def linder_table(file=None, **kwargs):
"""Load Linder Model Table
Function to read in isochrone models from Linder et al. 2019.
Returns an astropy Table.
Parameters
----------
age : float
Age in Myr. If set to None, then an array of ages from the file
is used to generate dicti... | ff6b187009c8bbcef8ae604095c289429863907e | 1,981 |
def json_redirect(request, url, **kwargs):
"""
Returns a JSON response for redirecting to a new URL. This is very specific
to this project and depends on the JavaScript supporting the result that
is returned from this method.
"""
if not request.is_ajax():
raise PermissionDenied("Must be ... | 7fbafcfc400c733badc26fcb97bc3a61f4c49f74 | 1,982 |
def unauthenticatedClient():
    """Return an API client with no authenticated user."""
    return APIClient()
import os
def filters_to_kcorrect(curve_file, verbose=False):
"""
Convert a filter response curve to the Kcorrect format.
This is used by Kcorrect and iSEDFit.
"""
if not os.path.isfile(curve_file):
raise IOError("# Cannot find the response curve file {}".format(curve_file))
# Read i... | 144e1af636778e503c394a615af741dc51f5f7d9 | 1,984 |
import re
def get_known_disk_attributes(model):
"""Get known NVMe/SMART attributes (model specific), returns str."""
known_attributes = KNOWN_DISK_ATTRIBUTES.copy()
# Apply model-specific data
for regex, data in KNOWN_DISK_MODELS.items():
if re.search(regex, model):
for attr, thresholds in data.ite... | 39ece3213996b201d1109d7787bcd8fed859235b | 1,985 |
def get_one_exemplar_per_class_proximity(proximity):
"""
unpack proximity object into X, y and random_state for picking exemplars.
----
Parameters
----
proximity : Proximity object
Proximity like object containing the X, y and random_state variables
required for picking exemplars... | eeb46d07a757d6b06432369f26f5f2391d9b14cd | 1,986 |
def annotation_layers(state):
    """Get all annotation layer names in the state.

    Parameters
    ----------
    state : dict
        Neuroglancer state as a JSON dict.

    Returns
    -------
    names : list
        Name of every layer whose ``type`` is ``"annotation"``.
    """
    names = []
    for layer in state["layers"]:
        if layer["type"] == "annotation":
            names.append(layer["name"])
    return names
def describe_bvals(bval_file) -> str:
"""Generate description of dMRI b-values."""
# Parse bval file
with open(bval_file, "r") as file_object:
raw_bvals = file_object.read().splitlines()
# Flatten list of space-separated values
bvals = [
item for sublist in [line.split(" ") for line ... | 1d19c71d9422a37f425c833df52d9b1936195660 | 1,988 |
def weight_update4(weights, x_white, bias1, lrate1, b_exp):
""" Update rule for infomax
This function recieves parameters to update W1
* Input
weights : unmixing matrix (must be a square matrix)
x_white: whitened data
bias1: current estimated bias
lrate1: current learning rate
b_exp : ex... | 6c2d5c6610724787b4e8c8fb42569265e4b13d76 | 1,989 |
def Dijkstra(graph, source):
"""
Dijkstra's algorithm for shortest path between two vertices on a graph.
Arguments
---------
graph -- directed graph; object of Graph class
source -- start vertex
>>> graph = Graph()
>>> graph.addVertex("A")
>>> conns = [ ("A", "B"), ("A", "C"), ("B"... | 9585c13c5504cdbff62494c2d5d97655c2281c34 | 1,990 |
def annealing_epsilon(episode: int, min_e: float, max_e: float, target_episode: int) -> float:
"""Return an linearly annealed epsilon
Epsilon will decrease over time until it reaches `target_episode`
(epsilon)
|
max_e ---|\
| \
| \
| \
mi... | fab650085f271f1271025e23f260eb18e645a9ba | 1,991 |
import jsonschema
def ExtendWithDefault(validator_class):
"""Takes a validator and makes it set default values on properties.
Args:
validator_class: A class to add our overridden validators to
Returns:
A validator_class that will set default values
and ignore required fields
... | 42ab80b2c52e474a354589eb4c6041450cf23fd2 | 1,992 |
def coach_input_line(call, school, f):
"""
Returns a properly formatted line about a coach.
:param call: (String) The beginning of the line, includes the gender, sport, and school abbreviation.
:param school:(String) The longform name of the school.
:param f: (String) The input line from the user.
... | 762127ac058949af890c2ef7f19b924642cc4c39 | 1,993 |
def pad_seq(seq, max_length, PAD=0):
    """Pad *seq* in place with ``PAD`` values up to ``max_length`` elements.

    :param seq: list of int; mutated in place (and also returned for
        convenience).
    :param max_length: int; target length.  A sequence already this long
        (or longer) is returned unchanged.
    :param PAD: value appended to short sequences (default 0).
    :return seq: list of int.
    """
    # List multiplication is the idiomatic O(k) way to build the padding;
    # a negative count yields an empty list, so long inputs pass through.
    seq += [PAD] * (max_length - len(seq))
    return seq
def complex_domain(spectrogram):
"""
Complex Domain.
Parameters
----------
spectrogram : :class:`Spectrogram` instance
:class:`Spectrogram` instance.
Returns
-------
complex_domain : numpy array
Complex domain onset detection function.
References
----------
... | 10248ca5bb291326018934d654b2fee6a8a972d0 | 1,995 |
import torch
def toOneHot(action_space, actions):
"""
If action_space is "Discrete", return a one hot vector, otherwise just return the same `actions` vector.
actions: [batch_size, 1] or [batch_size, n, 1]
If action space is continuous, just return the same action vector.
"""
# One hot encod... | bad47c1f55795d16bdcd67aac67b4ae40a40363c | 1,996 |
def find_triangle(n):
    """Return the first triangle number with more than *n* divisors.

    Note the test is strictly ``> n`` ("over n divisors", Project Euler
    12 phrasing), not exactly n.
    """
    # t is the i-th triangle number, maintained incrementally: t = 1+2+...+i.
    t, i = 1, 1
    while True:
        i += 1
        t += i
        # `divisors(t)` is defined elsewhere -- presumably returns all
        # positive divisors of t; confirm.
        if len(divisors(t)) > n:
            return t
import requests
def get_main_page_soup(home_url):
""" parse main page soup"""
user_agent= 'Mozilla / 5.0 (Windows NT 10.0; Win64; x64) AppleWebKit / 537.36(KHTML, ' \
'like Gecko) Chrome / 64.0.3282.140 Safari / 537.36 Edge / 18.17763 '
headers = {'User-agent':user_agent}
# request to ... | 6100fa9b669ee498dea354418b3816bbc46b3b26 | 1,998 |
def gen_task4() -> np.ndarray:
"""Task 4: main corner of a triangle."""
canv = blank_canvas()
r, c = np.random.randint(GRID-2, size=2, dtype=np.int8)
syms = rand_syms(6) # 6 symbols for triangle
# Which orientation? We'll create 4
rand = np.random.rand()
if rand < 0.25:
# top left
rows, cols = [r,... | d367af38a74fd57eb86d001103a1f8656b395209 | 1,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.