content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def _nw_score_(s1, s2, insert=lambda c: -2,
delete=lambda c: -2,
substitute=lambda c1, c2: 2 if c1 == c2 else -1):
"""Compute Needleman Wunsch score for aligning two strings.
This algorithm basically performs the same operations as Needleman Wunsch
alignment, but is made more ... | 6,100 |
def load_gisaid_data(
*,
device="cpu",
min_region_size=50,
include={},
exclude={},
end_day=None,
columns_filename="results/usher.columns.pkl",
features_filename="results/usher.features.pt",
feature_type="aa",
) -> dict:
"""
Loads the two files columns_filename and features_fi... | 6,101 |
def _upload_blob_bucket(bucket_name, source_file_name, destination):
    """Upload a local file to a Google Cloud Storage bucket.

    :param bucket_name: name of the target GCS bucket
    :param source_file_name: path of the local file to upload
    :param destination: blob name to create inside the bucket
    """
    client = storage.Client()
    target_bucket = client.bucket(bucket_name)
    target_blob = target_bucket.blob(destination)
    target_blob.upload_from_filename(source_file_name)
def perform_query(query, terms, nodes, names, back_prop, prefix="", lineage=False):
"""
Search database based on a name or the ontological id.
"""
# The query is an exact match, print info.
if names.get(query) or terms.get(query):
# Get the GO or SO id
uid = names.get(query) or que... | 6,103 |
def get_symbol_size(sym):
    """Return the size of a symbol (its ``st_size`` field)."""
    symbol_size = sym["st_size"]
    return symbol_size
def stake_increase(nmr, model_id):
    """Increase the stake for *model_id* by *nmr* NMR and echo the API response."""
    response = napi.stake_increase(nmr, model_id)
    click.echo(response)
def print_initial_mlperf_config(params, seed):
"""Prints MLPerf config."""
mlp_log.mlperf_print('cache_clear', value=True)
mlp_log.mlperf_print('init_start', value=None)
mlp_log.mlperf_print('global_batch_size', params['batch_size'])
mlp_log.mlperf_print('opt_name', value=FLAGS.optimizer)
mlp_log.mlperf_pri... | 6,106 |
def rotate(dst: ti.template(), src: ti.template(), t: float):
"""
rotate src to dst
"""
rot = ti.Matrix.rotation2d(2 * math.pi * t)
for i, j in ti.ndrange(N,N):
p = (ti.Vector([i,j]) / N - 0.5)
if p.norm() > 0.5:
dst[i,j] = [0.0, 0.0, 0.0]
continue
p ... | 6,107 |
def csv_to_table(db_engine_url: str, csv_path: str, table_name: str):
"""
Converting the csv to sql and updating the database with the values.
# Connecting to the database and using the pandas method to handle conversion
# https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_sql.ht... | 6,108 |
def plot_lightcurve(catalog, source):
"""Print info for CATALOG and SOURCE"""
catalog = source_catalogs[catalog]
source = catalog[source]
print()
print(source)
print()
# Generic info dict
# source.pprint()
# Specific source info print-out
# if hasattr(source, 'print_info'):
... | 6,109 |
def parse_args(argv):
"""Parse and validate command line flags"""
parser = argparse.ArgumentParser()
parser.add_argument(
'--base-image',
type=functools.partial(
validation_utils.validate_arg_regex, flag_regex=IMAGE_REGEX),
default='gcr.io/google-appengine/python:latest',... | 6,110 |
def to_unnamed_recursive(sexpr, scheme):
    """Convert all named column references to unnamed column references.

    Walks the expression tree rooted at *sexpr*, rewriting every
    NamedAttributeRef via ``toUnnamed`` against *scheme*.
    """
    def _rewrite(node):
        # Rewrite this node first, then recurse into its children.
        if isinstance(node, NamedAttributeRef):
            node = toUnnamed(node, scheme)
        node.apply(_rewrite)
        return node
    return _rewrite(sexpr)
def SetupPyMOLObjectNamesForComplex(FileIndex, PyMOLObjectNames):
"""Stetup groups and objects for complex. """
PDBFileRoot = OptionsInfo["InfilesInfo"]["InfilesRoots"][FileIndex]
PDBGroupName = "%s" % PDBFileRoot
PyMOLObjectNames["PDBGroup"] = PDBGroupName
PyMOLObjectNames["PDBGroupMember... | 6,112 |
def setup(bot):
    """Discord.py extension entry point.

    Called by Discord.py when this file is loaded as an extension;
    registers the General cog on *bot*.
    """
    cog = General(bot)
    bot.add_cog(cog)
def login_process():
"""Process login."""
email_address = request.form.get("email")
password = request.form.get("password")
user = User.query.filter_by(email_address=email_address).first()
if not user:
flash("Please try again!")
return redirect('/')
if user.password != passwor... | 6,114 |
def get_doc_translations(doctype, name):
"""
Returns a dict custom tailored for the document.
- Translations with the following contexts are handled:
- doctype:name:docfield
- doctype:name
- doctype:docfield (Select fields only)
- 'Select' docfields will have a values dict which will have
trans... | 6,115 |
def test_account_created(requestbin, login, ui_account):
"""
Test:
- Create account
- Get webhook response for created
- Assert that webhook response is not None
- Assert that response xml body contains right account name
"""
webhook = requestbin.get_webhook("created", st... | 6,116 |
def fetch_newer_version(
installed_version=scancode_version,
new_version_url='https://pypi.org/pypi/scancode-toolkit/json',
force=False,
):
"""
Return a version string if there is an updated version of scancode-toolkit
newer than the installed version and available on PyPI. Return None
other... | 6,117 |
def app(request):
    """Default view for the Person Authority App."""
    context = {'app': APP}
    return direct_to_template(request,
                              'person_authority/app.html',
                              context)
def is_xbar(top, name):
"""Check if the given name is crossbar
"""
xbars = list(filter(lambda node: node["name"] == name, top["xbar"]))
if len(xbars) == 0:
return False, None
if len(xbars) > 1:
log.error("Matching crossbar {} is more than one.".format(name))
raise SystemExit... | 6,119 |
def index():
    """Root URL response."""
    body = "Reminder: return some useful information in json format about the service here"
    return body, status.HTTP_200_OK
def backproject(depth, K):
"""Backproject a depth map to a cloud map
depth: depth
----
organized cloud map: (H,W,3)
"""
H, W = depth.shape
X, Y = np.meshgrid(np.asarray(range(W)) - K[0, 2], np.asarray(range(H)) - K[1, 2])
return np.stack((X * depth / K[0, 0], Y * depth / K[1, 1], depth)... | 6,121 |
async def test_refresh_codes(hass, lock_data, caplog):
"""Test refresh_codes"""
await setup_ozw(hass, fixture=lock_data)
state = hass.states.get("lock.smartcode_10_touchpad_electronic_deadbolt_locked")
assert state is not None
assert state.state == "locked"
assert state.attributes["node_id"] ==... | 6,122 |
def get_parent_dir(os_path: str) -> str:
    """Return the directory two levels above *os_path* as a string.

    NOTE(review): despite the name, ``parents[1]`` is the *grand*parent of
    the path (``parents[0]`` would be the immediate parent) — confirm the
    original behaviour is intentional; it is preserved here.
    """
    ancestors = Path(os_path).parents
    return str(ancestors[1])
def test_is_admin():
"""Returns True if the program is ran as administrator.
Returns False if not ran as administrator.
"""
try:
is_admin = (os.getuid() == 0)
except AttributeError:
is_admin = ctypes.windll.shell32.IsUserAnAdmin() != 0
if is_admin == 1:
return 1
else:... | 6,124 |
def parse_work_url(work_url):
    """Extract the work id from a work url.

    The id is the last path segment of the url, e.g.
    ``http://purl.bdrc.io/resource/W123`` -> ``W123``.

    Args:
        work_url (str): work url; may be None or empty, and a trailing
            slash is tolerated

    Returns:
        str: bdrc work id, or "" when no url was given
    """
    work_id = ""
    if work_url:
        # Strip a trailing slash so ".../W123/" still yields "W123"
        # instead of the empty last segment.
        work_id = work_url.rstrip("/").split("/")[-1]
    return work_id
def room():
    """Create a Room instance for all tests to share."""
    dimensions = {"x": 4, "y": 4, "z": 4}
    return Room(dimensions, savable=False)
def lab_pull(tag, bucket, project, force):
""" Pulls Lab Experiment from minio to current directory """
home_dir = os.path.expanduser('~')
lab_dir = os.path.join(home_dir, '.lab')
if not os.path.exists(lab_dir):
click.secho('Lab is not configured to connect to minio. '
'Run... | 6,127 |
def searchLiteralLocation(a_string, patterns):
"""assumes a_string is a string, being searched in
assumes patterns is a list of strings, to be search for in a_string
returns a list of re span object, representing the found literal if it exists,
else returns an empty list"""
results = []
for patt... | 6,128 |
def UIOSelector_Highlight(inUIOSelector):
"""
Highlight (draw outline) the element (in app) by the UIO selector.
:param inUIOSelector: UIOSelector - List of items, which contains condition attributes
:return:
"""
# Check the bitness
lSafeOtherProcess = UIOSelector_SafeOtherGet_Process(inUI... | 6,129 |
def _calc_paths ():
"""
Essentially Floyd-Warshall algorithm
"""
def dump ():
for i in sws:
for j in sws:
a = path_map[i][j][0]
#a = adjacency[i][j]
if a is None: a = "*"
print a,
print
sws = switches.values()
path_map.clear()
for k in sws:
for j,port ... | 6,130 |
def clear_cts_counters(device):
""" Clear CTS credentials
Args:
device ('obj'): device to use
Returns:
None
Raises:
SubCommandFailure: Failed to clear cts counters
"""
try:
device.execute('clear cts role-based counters')
except SubComma... | 6,131 |
def completeMessage_BERT(mod, tok, ind, max_length=50):
"""
Sentence Completion of the secret text from BERT
"""
tokens_tensor = torch.tensor([ind])
outInd = mod.generate(tokens_tensor, max_length=50)
outText=tok.decode(outInd[0].tolist())
newText=outText[len(tok.decode(ind)):]
newText=newText.split(se... | 6,132 |
def fast_star(x, y, points=20, outer=100, inner=50, **kwargs):
""" Draws a star with the given points, outer radius and inner radius.
The current stroke, strokewidth and fill color are applied.
"""
scale = gcd(inner, outer)
iscale = inner / scale
oscale = outer / scale
cached = _stars.ge... | 6,133 |
def vshcoefs():
"""Test plot of a tangential vector function given by vsh
coefficients."""
theta = numpy.linspace(0.0, math.pi, num=32)
phi = numpy.linspace(0.0, 2*math.pi, num=32)
PHI, THETA = numpy.meshgrid(phi, theta)
cfl = [[[0.0,1.0,-1.0]],[[0.0,0*1.0j,0*1.0j]]]
cfs = vshCoefs(cfl)
... | 6,134 |
def kill(pyngrok_config=None):
"""
Terminate the ``ngrok`` processes, if running, for the given config's ``ngrok_path``. This method will not
block, it will just issue a kill request.
:param pyngrok_config: A ``pyngrok`` configuration to use when interacting with the ``ngrok`` binary,
overridin... | 6,135 |
def _select_train_and_seat_type(train_names, seat_types, query_trains):
"""
选择订票车次、席别
:param train_names 预定的车次列表
:param seat_types 预定席别列表
:param query_trains 查询到火车车次列表
:return select_train, select_seat_type
"""
def _select_trains(query_trains, train_names=None):
if train_names:
... | 6,136 |
def load_real_tcs():
    """Load real timecourses after djICA preprocessing.

    Tries the 'Shat' key first, then falls back to the legacy 'Shat_'
    key. Prints a message and returns None when neither key is present.

    Returns:
        The first row of the matching variable, or None on failure.
    """
    # Load the .mat file once instead of once per key attempt.
    mat = sio.loadmat(REAL_TC_DIR)
    for key in ('Shat', 'Shat_'):
        if key in mat:
            return mat[key][0]
    print("Incorrect key")
def create_fsaverage_forward(epoch, **kwargs):
"""
A forward model is an estimation of the potential or field distribution for a known source
and for a known model of the head. Returns EEG forward operator with a downloaded template
MRI (fsaverage).
Parameters:
epoch: mne.epochs.Epochs
... | 6,138 |
def attach_client_to_session(
session, project_name, dataset_name, client_state):
"""Attach the client state to the session, by saving its id.
Args:
session
project_name
dataset_name
client_state: nc_models.ClientState
"""
sel_id_name = client_id_name(project_nam... | 6,139 |
def make_file(path):
"""
Factory function for File strategies
:param str path: A local relative path or s3://, file:// protocol urls
:return:
"""
try:
if not is_valid_url(path):
return LocalFile(os.path.abspath(path))
url_obj = urlparse(path)
if url_obj.sch... | 6,140 |
def sort(cfile):
"""
Sort the ExoMol .trans files by wavenumber for MARVELized .states files
Parameters
----------
cfile: String
A repack configuration file.
"""
banner = 70 * ":"
args = parser(cfile)
files, dbtype, outfile, tmin, tmax, dtemp, wnmin, wnmax, dwn, \
st... | 6,141 |
def midi_to_hz(notes):
    """Convert MIDI note number(s) to frequency in Hz.

    Uses equal temperament with A4 (MIDI note 69) tuned to 440 Hz.
    Accepts a scalar or array-like; returns an ndarray result.
    """
    semitones_from_a4 = (np.asanyarray(notes) - 69.0) / 12.0
    return 440.0 * np.power(2.0, semitones_from_a4)
def test_eq_score():
"""Test the score function when the score found should be returned"""
not_reported_score = 1
score_function = ScoreFunction(match_type='integer', equal=True)
score_function.set_not_reported(not_reported_score)
assert score_function.get_score(3) == 3
asser... | 6,143 |
def bond_stereo_parities(chi, one_indexed=False):
""" Parse the bond stereo parities from the stereochemistry layers.
:param chi: ChI string
:type chi: str
:param one_indexed: Return indices in one-indexing?
:type one_indexed: bool
:returns: A dictionary mapping bond keys on... | 6,144 |
def filter_camera_angle(places):
    """Filter camera angles for KiTTI Datasets.

    Keeps rows satisfying |places[:, 1]| < places[:, 0] - 0.27, i.e. the
    same cone as the pair of signed comparisons in the original code.
    """
    keep = np.abs(places[:, 1]) < (places[:, 0] - 0.27)
    return places[keep]
def neural_log_literal_function(identifier):
    """
    A decorator for NeuralLog literal functions.

    :param identifier: the identifier of the function
    :type identifier: str
    :return: the decorated function
    :rtype: function
    """
    def decorator(function):
        # Register *function* under *identifier* in the literal registry.
        return registry(function, identifier, literal_functions)
    return decorator
def get_factors(n: int) -> list:
    """Return the factors of a given integer in ascending order.

    Non-positive ``n`` yields an empty list (same as the original
    range-based scan). Runs in O(sqrt(n)) instead of O(n) by collecting
    each divisor together with its cofactor.
    """
    small = []  # divisors <= sqrt(n), ascending
    large = []  # paired cofactors > sqrt(n), collected ascending
    i = 1
    while i * i <= n:
        if n % i == 0:
            small.append(i)
            cofactor = n // i
            if cofactor != i:  # skip the duplicate root of a perfect square
                large.append(cofactor)
        i += 1
    # large holds the big divisors smallest-cofactor-last; reverse to sort.
    return small + large[::-1]
def fetch_tables():
""" Used by the frontend, returns a JSON list of all the tables including metadata. """
return jsonify([
{
"tab": "animeTables",
"name": "Anime",
"tables": [
{
"id": "englishAnimeSites",
"titl... | 6,148 |
def configure_logger(logger, logfile):
"""Configure logger"""
formatter = logging.Formatter(
"%(asctime)s :: %(levelname)s :: %(message)s")
file_handler = RotatingFileHandler(logfile, "a", 1000000, 1)
# Add logger to file
if (config.w.conf_file.get_w_debug().title() == 'True'):
logg... | 6,149 |
def base_positive_warps():
"""
Get warp functions associated with domain (0,inf), scale 1.0
Warp function is defined as f(x) = log(exp(x)-1)
Returns
-------
Callable[torch.Tensor,torch.Tensor],
Callable[torch.Tensor,torch.Tensor],
Callable[torch.Tensor,torch.Tensor]
Function... | 6,150 |
def dev_work_create():
"""
Create work order.
:return:
"""
db_ins = current_user.dbs
audits = User.query.filter(User.role == 'audit')
form = WorkForm()
if form.validate_on_submit():
sql_content = form.sql_content.data
db_ins = form.db_ins.data
shard = form.shard.... | 6,151 |
def blur(x, mean=0.0, stddev=1.0):
"""
Resize to smaller size (AREA) and then resize to original size (BILINEAR)
"""
size = tf.shape(x)[:2]
downsample_factor = 1 + tf.math.abs(tf.random.normal([], mean=mean, stddev=stddev))
small_size = tf.to_int32(tf.to_float(size)/downsample_factor)
x = tf.image.resize_... | 6,152 |
def curve(ini_file,
cali_fits,
out_lc_png=None,
fig_set=None,
noplot=False,
overwrite=False,
log=None,
extra_config=None):
"""
plot light curve, calibration with giving data
:param ini_file:
:param cali_fits:
:param out_lc_png: if... | 6,153 |
def test_regression_gch(sample_inputs_fixture):
"""
Tandem turbines with the upstream turbine yawed and yaw added recovery
correction enabled
"""
sample_inputs_fixture.floris["wake"]["properties"][
"velocity_model"
] = VELOCITY_MODEL
sample_inputs_fixture.floris["wake"]["properties"]... | 6,154 |
def test_extend_dict_key_value(minion_opts, local_salt):
"""
Test the `extend_dict_key_value` Jinja filter.
"""
rendered = render_jinja_tmpl(
"{{ {} | extend_dict_key_value('foo:bar:baz', [42]) }}",
dict(opts=minion_opts, saltenv="test", salt=local_salt),
)
assert rendered == "{'... | 6,155 |
def improve(update, close, guess=1, max_updates=100):
    """Iteratively refine *guess*.

    Repeatedly applies *update* until ``close(guess)`` is true or
    *max_updates* updates have been applied, then returns the final guess.
    """
    applied = 0
    # Note: close() is evaluated before each update, matching the
    # termination check order of the loop condition.
    while not close(guess) and applied < max_updates:
        guess = update(guess)
        applied += 1
    return guess
def images_to_sequence(tensor):
"""Convert a batch of images into a batch of sequences.
Args:
tensor: a (num_images, height, width, depth) tensor
Returns:
(width, num_images*height, depth) sequence tensor
"""
num_image_batches, height, width, depth = _shape(tensor)
transposed = tf.transpose(tenso... | 6,157 |
def transform_regions(regions: list[dict[str, Any]]) -> list[dict[str, Any]]:
"""
Transform aggregated region data for map
regions -- aggregated data from region pipeline
"""
records = []
for record in regions:
if "latitude" in record["_id"].keys():
if record["admin3"]:
... | 6,158 |
def _file_to_import_exists(storage_client: storage.client.Client,
bucket_name: str, filename: str) -> bool:
"""Helper function that returns whether the given GCS file exists or not."""
storage_bucket = storage_client.get_bucket(bucket_name)
return storage.Blob(
bucket=storage_buc... | 6,159 |
def run(args, options):
""" Compile a file and output a Program object.
If options.merge_opens is set to True, will attempt to merge any
parallelisable open instructions. """
prog = Program(args, options)
VARS['program'] = prog
if options.binary:
VARS['sint'] = GC_types.sbitint... | 6,160 |
def get_relative_poses(
num_frames: int,
frames: np.ndarray,
selected_track_id: Optional[int],
agents: List[np.ndarray],
agent_from_world: np.ndarray,
current_agent_yaw: float,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Internal function that creates the targets and ... | 6,161 |
def read_mat_cplx_bin(fname):
"""
Reads a .bin file containing floating-point values (complex) saved by Koala
Parameters
----------
fname : string
Path to the file
Returns
-------
buffer : ndarray
An array containing the complex floating-point values read from the file
... | 6,162 |
def test_init_subclass_attrs():
"""
`__init_subclass__` works with attrs classes as long as slots=False.
"""
@attr.s(slots=False)
class Base:
def __init_subclass__(cls, param, **kw):
super().__init_subclass__(**kw)
cls.param = param
@attr.s
class Attrs(Base,... | 6,163 |
def DeleteDataBundle(**kwargs):
    """
    Deletes a Data Bundle by ID.

    :param kwargs: must contain 'data_bundle_id'
    :return: (kwargs, 200) — the request payload echoed with an OK status
    """
    bundle_id = kwargs['data_bundle_id']
    del data_bundles[bundle_id]
    return kwargs, 200
def determineactions(repo, deficiencies, sourcereqs, destreqs):
"""Determine upgrade actions that will be performed.
Given a list of improvements as returned by ``finddeficiencies`` and
``findoptimizations``, determine the list of upgrade actions that
will be performed.
The role of this function i... | 6,165 |
def filter_ccfs(ccfs, sc_thresh, min_ccf):
"""
Remove noisy ccfs from irrelevant experiments
:param ccfs: 2d array
:param sc_thresh: int
number of sign changes expected
:param min_ccf: float
cutoff value for a ccf to be above the noise threshold
:return:
"""
if sc_thresh ... | 6,166 |
def market_data(symbol, expirationDate, strike, optionType, info=None):
"""Gets option market data from information. Takes time to load pages."""
assert all(isinstance(i, str) for i in [symbol, expirationDate, strike, optionType])
return robin_stocks.options.get_option_market_data(symbol, expirationDate, strike, opt... | 6,167 |
def close_logger(logger):
    """Close and detach all handlers of *logger*.

    File handles etc. are not closed automatically, so close them here.
    A None logger is a no-op.
    """
    if logger is None:
        return
    # Iterate over a snapshot because removeHandler mutates the list.
    for handler in list(logger.handlers):
        handler.close()
        logger.removeHandler(handler)
def get_documents_embeddings (y, embedder, column):
"""
Given a Dataframe containing study_id and a text column, return a numpy array of embeddings
The idea of this function is to prevent to embed two times the same text (for computation efficiency)
Parameters:
-----------
... | 6,169 |
def index(request):
    """Home page view.

    Renders the home page template.
    """
    return render(request, "t_myapp/index.html")
def cidr_mask_to_subnet_mask(mask_num):
    """Convert a CIDR prefix length to a dotted-decimal netmask.

    :param mask_num: prefix length, e.g. 16
    :return: dotted-decimal IPv4 address string
    """
    mask_as_int = cidr_mask_to_ip_int(mask_num)
    return convert_to_ipv4(mask_as_int, stype='int')
def correlate(A,B,
rows=None,columns=None, mode_row='zero', mode_column='zero'):
"""Correlate A and B.
Input:
------
A,B : array
Input data.
columns : int
Do correlation at columns 0..columns, defaults to the number of columns in A.
rows : int
Do correlatio... | 6,172 |
def question_print( instr ) :
"""
function question_print
by Charles Stanier charles-stanier@uiowa.edu Aug 9, 2019
purpose: this takes a string and prints it with added question marks
modification history: none
input arguments: instr is intended to be a variable of type str (string)
"... | 6,173 |
def plot_two_series(A, B, variable, title):
"""Plot two series using the same `date` index.
Parameters
----------
A, B: pd.DataFrame
Dataframe with a `date` key and a variable
passed in the `variable` parameter. Parameter A
represents the "Observed" series and B the "Predic... | 6,174 |
def getChrLenList(chrLenDict, c):
""" Given a chromosome length dictionary keyed on chromosome names and
a chromosome name (c) this returns a list of all the runtimes for a given
chromosome across all Step names.
"""
l = []
if c not in chrLenDict:
return l
for n in chrLenDict[c]:
... | 6,175 |
def build_arg_parser():
"""
Build an argument parser using argparse. Use it when python version is 2.7 or later.
"""
parser = argparse.ArgumentParser(description="Smatch table calculator -- arguments")
parser.add_argument("--fl", type=argparse.FileType('r'), help='AMR ID list file')
parser.add_... | 6,176 |
def query_sessions(user_id: Optional[int]) -> TList[Session]:
"""
Return all user's sessions
:param user_id: current user ID (None if user auth is disabled)
:return: list of session objects
"""
adb = get_data_file_db(user_id)
return [Session(db_session) for db_session in adb.query(DbSessio... | 6,177 |
def positionalencoding3d(d_model, dx, dy, dz):
"""
:param d_model: dimension of the model
:param height: height of the positions
:param width: width of the positions
:return: d_model*height*width position matrix
"""
# if d_model % 6 != 0:
# raise ValueError("Cannot use sin/cos position... | 6,178 |
def make_headers(context: TraceContext) -> Headers:
"""Creates dict with zipkin headers from supplied trace context.
"""
headers = {
TRACE_ID_HEADER: context.trace_id,
SPAN_ID_HEADER: context.span_id,
FLAGS_HEADER: '0',
SAMPLED_ID_HEADER: '1' if context.sampled else '0',
... | 6,179 |
def test_2k7campeonatojuizforano(caplog):
"""Very hard to parse comments (line braks, few markers)"""
TWDA = twda._TWDA()
with open(
os.path.join(os.path.dirname(__file__), "2k7campeonatojuizforano.html")
) as f:
TWDA.load_html(f)
assert len(TWDA) == 1
assert TWDA["2k7campeonatoj... | 6,180 |
def shiftRightUnsigned(col, numBits):
"""Unsigned shift the given value numBits right.
>>> df = spark.createDataFrame([(-42,)], ['a'])
>>> df.select(shiftRightUnsigned('a', 1).alias('r')).collect()
[Row(r=9223372036854775787)]
"""
sc = SparkContext._active_spark_context
jc = sc._jvm.functio... | 6,181 |
def scalar_sub(x: Number, y: Number) -> Number:
    """Implement `scalar_sub`: return the difference ``x - y`` of two scalars."""
    _assert_scalar(x, y)
    difference = x - y
    return difference
def predict_attack(h1,h2,h3,h4,h5,h6,h7,h8,h9,h10,h11,h12,h13):
"""
Parameters:
-name:h1
in:query
type:number
required=True
-name:h5
in:query
type:number
required:True
-name:h4
in:query
type:number
required:True
... | 6,183 |
def _check_varrlist_integrity(vlist):
"""Return true if shapes and datatypes are the same"""
shape = vlist[0].data.shape
datatype = vlist[0].data.dtype
for v in vlist:
if v.data.shape != shape:
raise(Exception("Data shapes don't match"))
if v.data.dtype != datatype:
... | 6,184 |
def subclassfactory(fact_method):
"""fact_method takes the same args as init and returns the subclass appropriate to those args
that subclass may in turn override the same factory method and choose amoung it's subclasses.
If this factory method isn't overridden in the subclass an object of that class is ini... | 6,185 |
def test_get_partitions():
    """Test getting the partitions property."""
    partitions = m.partitions
    # Exact-type check (not isinstance) deliberately preserved.
    assert_true(type(partitions) is tuple)
def simulate_var1(x_tnow, b, mu, sigma2, m_, *, j_=1000, nu=10**9,
init_value=True):
"""For details, see here.
Parameters
----------
x_tnow : array, shape(n_, )
b : array, shape(n_,n_)
mu : array, shape(n_, )
sigma2 : array, shape(n_,n_)
m_ : int
... | 6,187 |
def is_drom(insee_city: Optional[str] = None, insee_region: Optional[str] = None) -> bool:
"""
Est-ce que le code INSEE de la ville ou de la région correspond à un DROM ?
Args:
insee_city: Code INSEE de la ville
insee_region: Code INSEE de la région
Returns:
Vrai ssi le code IN... | 6,188 |
def pack_rows(rows, bitdepth):
"""Yield packed rows that are a byte array.
Each byte is packed with the values from several pixels.
"""
assert bitdepth < 8
assert 8 % bitdepth == 0
# samples per byte
spb = int(8 / bitdepth)
def make_byte(block):
"""Take a block of (2, 4, or 8)... | 6,189 |
def reset_user_pwd(username: str) -> int:
    """Reset a user's password to the default value.

    :param username: user name
    :return: result code: 1 on success, 0 on failure
    """
    default_credentials = {'password': '12345678'}
    return update_user_info(username=username, args=default_credentials)
def configurationChanged(options, jsonFile):
"""
We received a new JSON configuration file
"""
audit("configurationChanged " + jsonFile)
if options["ignorefile"] == "yes":
trace("skipping database reconfiguration because skip_configuration_file exists")
return
if not os.path.isf... | 6,191 |
def location_engineering(df: pd.DataFrame) -> pd.DataFrame:
"""Call the `location_dict()` function to get the location dictionary and the
`location_dataframe()` one to add the location dictionary info to the DataFrame.
Parameters
----------
df :
The dataframe to work with.
Returns
... | 6,192 |
def extract_energyxtb(logfile=None):
"""
Extracts xtb energies from xtb logfile using regex matching.
Args:
logfile (str): Specifies logfile to pull energy from
Returns:
energy (list[float]): List of floats containing the energy in each step
"""
re_energy = re.compile("energy:... | 6,193 |
def GetPID():
    """Return the PID of the shell (i.e. this process's parent PID)."""
    parent_pid = os.getppid()
    return parent_pid
def render(history_lines, out_file):
"""Read historical data and save to out_file as img."""
dts = []
prs = []
queued = []
daily_happiness = [] # Percentage of last day queue was not blocked
merge_rate = [] # Merge rate for the past 24 active hours
real_merge_rate = [] # Merge rate includ... | 6,195 |
def make_transaction_frame(transactions):
"""
Formats a transaction DataFrame.
Parameters
----------
transactions : pd.DataFrame
Contains improperly formatted transactional data.
Returns
-------
df : pd.DataFrame
Daily transaction volume and dollar ammount.
- S... | 6,196 |
def find_or_create_role(name, desc):
    """Find an existing role by name, or build a new (unsaved) one.

    :param name: role name to look up
    :param desc: description used only when a new Role is created
    :return: the existing or newly constructed Role
    """
    existing = Role.query.filter(Role.name == name).first()
    if existing:
        return existing
    return Role(name=name, desc=desc)
def enumerate_shapefile_fields(shapefile_uri):
"""Enumerate all the fielfd in a shapefile.
Inputs:
-shapefile_uri: uri to the shapefile which fields have to be
enumerated
Returns a nested list of the field names in the order they are stored
in the layer, and grouppe... | 6,198 |
def parse_nrrdvector(inp):
    """Parse a vector from a nrrd header, return a list.

    :param inp: string of the form ``(v1,v2,...)`` — parenthesized,
        comma-separated values
    :return: list of values parsed via ``_to_reproducible_float``
    :raises ValueError: if the input is not enclosed in parentheses
    """
    # Raise explicitly instead of using `assert`, so the validation is
    # not stripped away when Python runs with the -O flag.
    if not (inp.startswith('(') and inp.endswith(')')):
        raise ValueError("Vector should be enclosed by parenthesis.")
    return [_to_reproducible_float(x) for x in inp[1:-1].split(',')]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.