content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def get_supported():
    """Fetch the ``_NET_SUPPORTED`` property of the root window.

    :return: A cookie resolving to the list of atoms the window manager
        advertises as supported hints.
    :rtype: util.PropertyCookie (ATOM[]/32)
    """
    raw = util.get_property(root, '_NET_SUPPORTED')
    return util.PropertyCookie(raw)
def get_exportables():
    """Return all exportable models except the Snapshot model."""
    return set(converters.get_exportables().values()) - {all_models.Snapshot}
def add_control_and_colors(name, cs, userDefined, arrayRanges, disableValues):
"""add parameters that change the settings and color of a filter"""
source = paraview.simple.FindSource(name)
# make up list of color options
fields = {'depth': 'depth'}
if not disableValues:
fields['luminance'] =... | 9,402 |
def playback(driver, settings, record, output, mode=None): # pylint: disable=W0621,R0912
"""
Playback a given test.
"""
if settings.desc:
output("%s ... " % settings.desc, flush=True)
else:
output("Playing back %s ... " % settings.name, flush=True)
_begin_browsing(driver, settin... | 9,403 |
def weighted_moments(values, weights):
    """Compute the weighted mean and weighted standard deviation.

    Parameters
    ----------
    values : numpy.ndarray
        Sample values.
    weights : array-like
        Weight associated with each sample.

    Returns
    -------
    tuple of float
        ``(weighted_mean, weighted_std)``.
    """
    mean = np.average(values, weights=weights)
    variance = np.average((values - mean) ** 2, weights=weights)
    return mean, np.sqrt(variance)
def tallennus(lista, tiedosto):
"""
Tallentaa pelin statsit tiedostoon.
Lista sisältää tulokset, jotka halutaan tallentaa
"""
tuloksia = []
try:
with open(tiedosto, "r+") as lahde:
data = json.load(lahde)
data.append(lista)
lahde.seek(0)
... | 9,405 |
def forestplot(data, kind='forestplot', model_names=None, var_names=None, combined=False,
credible_interval=0.95, quartiles=True, r_hat=True, n_eff=True, colors='cycle',
textsize=None, linewidth=None, markersize=None, joyplot_alpha=None,
joyplot_overlap=2, figsize=None):
... | 9,406 |
def write_file(filename, data, plain=False): # pylint: disable=too-many-branches
"""
Write a file, use suffix to determine type and compression.
- types: '.json', '.yaml'
- compression: None, '.gz'
write_file('variable.json.gz')
"""
if '.json' in filename:
content = ujson.dumps(da... | 9,407 |
def by_major_predictor(profiles, subject_ids, subject_id_dict, course_data=None):
"""Generates recommendations for people with the same major."""
# Build a list of majors/minors
all_courses_of_study = set()
for profile in profiles:
all_courses_of_study |= profile.courses_of_study
# Find co... | 9,408 |
def SetupPythonPackages(system, wheel, base_dir):
"""Installs python package(s) from CIPD and sets up the build environment.
Args:
system (System): A System object.
wheel (Wheel): The Wheel object to install a build environment for.
base_dir (str): The top-level build directory for the wheel.
Ret... | 9,409 |
def tf_dtype(dtype):
"""Translates dtype specifications in configurations to tensorflow data types.
Args:
dtype: String describing a numerical type (e.g. 'float'), numpy data type,
or numerical type primitive.
Returns: TensorFlow data type
"""
if dtype == 'float... | 9,410 |
def test_getitem(tree):
"""Nodes can be accessed via getitem."""
for node_id in tree:
assert tree[node_id]
# assert 'Node access should be possible via getitem.' in str(exc)
with pytest.raises(NodeNotFound) as exc:
assert tree['root']
assert "Node 'root' is not in the tree" in str... | 9,411 |
def winedll_override(dll, dtype):
    """Register a WINE dll override mapping *dll* to override type *dtype*."""
    message = 'Overriding ' + dll + '.dll = ' + dtype
    log.info(message)
    protonmain.dlloverrides[dll] = dtype
def decode_frame(raw_frame: bytes, frame_width: int, frame_height: int) -> Tuple[str, np.ndarray]:
"""
Decode the image bytes into string compatible with OpenCV
:param raw_frame: frame data in bytes
:param frame_width width of the frame, obtained from Kinesis payload
:param frame_height height of th... | 9,413 |
def preprocess(picPath):
"""preprocess"""
#read img
bgr_img = cv.imread(picPath)
#get img shape
orig_shape = bgr_img.shape[:2]
#resize img
img = cv.resize(bgr_img, (MODEL_WIDTH, MODEL_HEIGHT)).astype(np.int8)
# save memory C_CONTIGUOUS mode
if not img.flags['C_CONTIGUOUS']:
i... | 9,414 |
def live_fractal_or_skip():
"""
Ensure Fractal live connection can be made
First looks for a local staging server, then tries QCArchive.
"""
try:
return FractalClient("localhost:7777", verify=False)
except (requests.exceptions.ConnectionError, ConnectionRefusedError):
print("Fail... | 9,415 |
def get_legendre(theta, keys):
"""
Calculate Schmidt semi-normalized associated Legendre functions
Calculations based on recursive algorithm found in "Spacecraft Attitude Determination and Control" by James Richard Wertz
Parameters
----------
theta : array
Array of colatitudes in ... | 9,416 |
def train_and_evaluate(
model,
num_epochs,
steps_per_epoch,
train_data,
validation_steps,
eval_data,
output_dir,
n_steps_history,
FLAGS,
decay_type,
learning_rate=3e-5,
s=1,
n_batch_decay=1,
metric_accuracy='metric',
):
"""
Compiles keras model and loads d... | 9,417 |
def table_4_28(x_t, c_):
"""
Вывод поправочного коэффициента, учитывающего влияние толщины профиля
arguments: относительное положение точки перехода ламинарного пограничного слоя в турбулентный (Х_т_),
относительная толщина профиля
return: Значение поправочного коэффициента"""
nu_t_00 = [1.00, ... | 9,418 |
def random_spectra(path_length, coeffs, min_wavelength, max_wavelength, complexity):
    """Generate a random absorption spectrum.

    Builds a random solution of the given complexity and applies Beer's
    law to it over the requested wavelength range.
    """
    mixture = random_solution(coeffs, complexity)
    return beers_law(mixture, path_length, coeffs, min_wavelength,
                     max_wavelength)
async def channel_audition_info(channel, type_of_audition_color, type_of_audition_line) -> None:
"""
функция для оповещения об удалении/создании канала на сервере
:param channel: канал о котором сообщается
:param type_of_audition_color: цвет эмбеда
:param type_of_audition_line: линии для эмбеда
... | 9,420 |
def data_prep(data,unit_identifier,time_identifier,matching_period,treat_unit,control_units,outcome_variable,
predictor_variables, normalize=False):
"""
Prepares the data by normalizing X for section 3.3. in order to replicate Becker and Klößner (2017)
"""
X = data.loc[data[time_ident... | 9,421 |
async def ping(ctx):
    """Reply to a ping with "pong"."""
    reply = "pong"
    await ctx.send(reply)
def calculate_auroc_statistics(y_true, y_pred, confint_alpha=0.05):
"""
calculate AUROC and it's p-values and CI
"""
#TODO: small sample test
#TODO: check when it crashes
#TODO: confidence intervals
predictions_group0 = y_pred[y_true==0, 1]
predictions_group1 = y_pred[y_true==1, 1... | 9,423 |
def get_list_of_users(total_users):
"""Get List of GitHub Users."""
users = {}
try:
r = requests.get('https://api.github.com/users', headers={'Accept': 'application/vnd.github.v3+json'})
users = r.json()
len_users = len(users)
# login, id, avatar_url, type, html_url
... | 9,424 |
def set_base_initial_condition(model, monomer, value):
"""Set an initial condition for a monomer in its 'default' state."""
# Build up monomer pattern dict
sites_dict = {}
for site in monomer.sites:
if site in monomer.site_states:
if site == 'loc' and 'cytoplasm' in monomer.site_stat... | 9,425 |
def kabsch_superpose(P, Q): # P,Q: vstack'ed matrix
"""
Usage:
P = numpy.vstack([a2, b2, c2])
Q = numpy.vstack([a1, b1, c1])
m = kabsch_superpose(P, Q)
newP = numpy.dot(m, P)
"""
A = numpy.dot(numpy.transpose(P), Q)
U, s, V = numpy.linalg.svd(A)
tmp = numpy.identity(3)
tmp[2... | 9,426 |
def test_list_unsigned_short_max_length_3_nistxml_sv_iv_list_unsigned_short_max_length_4_3(mode, save_output, output_format):
"""
Type list/unsignedShort is restricted by facet maxLength with value 8.
"""
assert_bindings(
schema="nistData/list/unsignedShort/Schema+Instance/NISTSchema-SV-IV-list-... | 9,427 |
def __cancelButtonEvent(event):
    """Handle pressing Esc by clicking the Cancel button.

    Records CANCEL_TEXT as the dialog's reply, then stops the Tk
    mainloop via ``boxRoot.quit()`` so the caller can read the result.
    """
    global boxRoot, __widgetTexts, __replyButtonText
    __replyButtonText = CANCEL_TEXT
    boxRoot.quit()
def add(a_t, b_t):
    """Addition operator ``a + b``, delegating to ``add_op``."""
    return add_op(a_t, b_t)
def test_bad_doc():
"""run the cli_twine function on input file
containing un-compileable Python code.
The bad code block just gets skipped.
"""
mydoc = """\
```python .important foo=bar
import
print
```
bar
```python
print(2)
```
"""
with TemporaryDirectory() as tmpdirname:
infile_path = f"{tmpdirna... | 9,430 |
def download_pkg():
"""第二步下载相关环境需要的第三方库
:return: bool
"""
print("正在下载安装必要的第三方库文件...")
try:
# 如果需要使用IT之家爬虫还需要下载selenium、BeautifulSoup4、requests。可添加到后面
os.system('pip install flask flask_cors flask_wtf flask_mail pymysql redis apscheduler xlwt psutil ')
print("安装成功...")
... | 9,431 |
def test_get_jobs(limit, expected_number, auth_client):
"""Tests get method for api/jobs with various query strings"""
auth_client = auth_client[0]
query_string = "api/jobs"
if limit is not None:
query_string += f"?limit={limit}"
response = auth_client.get(query_string)
jobs_received... | 9,432 |
def construct_annotated_corpora(extraction_path, id_variant_path, corpus_name, target_dir):
""" Compiles ID-variant corpora annotated with evaluation-relevant information, i.e. normalized surprisal,
normalized UID, and sentence length, by extracting low-ID and high-ID entries from the annotated 90k Europarl
... | 9,433 |
def GetExtensionDescriptor(full_extension_name):
    """Look up an extension descriptor by its fully qualified field name."""
    return _pool.FindExtensionByName(full_extension_name)
def upload_file(file_name, bucket, object_name=None):
"""Upload a file to an S3 bucket
:param file_name: File to upload
:param bucket: Bucket to upload to
:param object_name: S3 object name. If not specified then file_name is used
:return: True if file was uploaded, else False
"""
# If S3 ... | 9,435 |
def parse_args():
"""
Parses command-line arguments and returns username, title of specified
repository and its' branch.
Returns: tuple (username, repo_name, branch).
Used only once in `main` method.
"""
DESC = 'Automatic license detection of a Github repository.'
parser = ArgumentParser... | 9,436 |
def get_parameters():
"""
Parse script arguments
"""
parser = argparse.ArgumentParser(prog='compile.py')
# config.h template parameters
parser.add_argument('os', type=str, default="LINUX", choices=available_os)
parser.add_argument('arch', type=str, default="X86", choices=available_archs)
... | 9,437 |
def collect_inventory_values(dataset, inventory_list, parameter_map):
"""
Collect inventories from a dataset.
"""
# Collect raw/unicode/clts for all relevant inventories
to_collect = []
for catalog in inventory_list.keys():
to_collect += list(
itertools.chain.from_iterable(i... | 9,438 |
def offset_resources(api, res_id, resource_list, position="top_left", align="horizontal", horizontal_offset=300,
vertical_offset=150, x_axis=100, y_axis=50):
"""
add resources to sandbox in different quadrants. choose stacking order
:param CloudShellAPISession api:
:param resource_l... | 9,439 |
def attest(att_stmt: AttestationStatement, att_obj: AttestationObject,
auth_data: bytes,
client_data_hash: bytes) -> Tuple[AttestationType, TrustedPath]:
"""Attest an attestation object.
Args:
att_stmt (AttestationStatement): The attestation statment.
att_obj (AttestationObjec... | 9,440 |
def Start(parser=None,
argv=sys.argv,
quiet=False,
add_pipe_options=True,
add_extract_options=False,
add_group_dedup_options=True,
add_sam_options=True,
add_umi_grouping_options=True,
return_parser=False):
"""set up an experiment.
... | 9,441 |
def sac(env_fn, actor_critic=core.mlp_actor_critic, ac_kwargs=dict(), seed=0,
steps_per_epoch=10000, epochs=10000, replay_size=int(1e6), gamma=0.99,
polyak=0.995, lr=1e-4, alpha=0.004, batch_size=256, start_steps=1000,
update_after=1000, update_every=1, num_test_episodes=0, max_ep_len=1000,
... | 9,442 |
def plot_progress_means(i, data, centroid_history, idx_history):
"""
Plot points on 2D plane
:param np.array data: array containing points
:param np.array centroid_history: saved centroids_history for every step
:param np.array centroid_history: saved centroids_history for every step
"""
K ... | 9,443 |
def flatten_list(a_list, parent_list=None):
"""Given a list/tuple as entry point, return a flattened list version.
EG:
>>> flatten_list([1, 2, [3, 4]])
[1, 2, 3, 4]
NB: The kwargs are only for internal use of the function and should not be
used by the caller.
"""
if parent_list... | 9,444 |
def list_to_csv_str(input_list: List) -> Text:
"""
Concatenates the elements of the list, joining them by ",".
Parameters
----------
input_list : list
List with elements to be joined.
Returns
-------
str
Returns a string, resulting from concatenation of list elements,
... | 9,445 |
def close_xray_safety_shutters():
    """Close the remote frontend X-ray safety shutters.

    Requests the close by clearing the shutter-open flag, then polls
    until the flag reads False (shutters confirmed closed) or the
    surrounding task is cancelled.
    """
    xray_safety_shutters_open.value = False
    # Original used the double negative `not value == False`; a plain
    # truthiness test is equivalent and clearer.
    while xray_safety_shutters_open.value and not task.cancelled:
        sleep(0.2)
def scheme_listp(x):
    """Return whether x is a well-formed list. Assumes no cycles."""
    current = x
    while current is not nil:
        if isinstance(current, Pair):
            current = current.second
        else:
            return False
    return True
def test_multisig_digprefix():
"""
Test multisig with self-addressing (digest) pre
"""
# Test sequence of events given set of secrets
secrets = [
'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
'AcwFTk-... | 9,448 |
def main():
"""
Runs the test
"""
args = mujoco_arg_parser().parse_args()
logger.configure()
train(args.env, num_timesteps=args.num_timesteps, seed=args.seed)
env = make_mujoco_env(args.env, args.seed)
model = PPO1(MlpPolicy, env, timesteps_per_actorbatch=2048, clip_param=0.2, entcoeff=... | 9,449 |
def check_license_analysis_message(context, message):
"""Check the message for the last license analysis."""
json_data = context.response.json()
actual = check_and_get_attribute(json_data, "message")
assert actual == message, \
"License service returns message {actual}, but other message {messag... | 9,450 |
def main(args):
"""Extract a MSR-VTT captions dataframe from the annotation files."""
with open(args.raw_data_path) as data_file:
data = json.load(data_file)
df = pd.DataFrame(columns=['vid_id', 'sen_id', 'caption'])
df_idx = 0
if args.continue_converting:
if os.path.is_file(args.in... | 9,451 |
def list_to_dict(config):
    """Merge a list-based beacon configuration into a single dictionary.

    Each element of *config* is a mapping; later elements override
    earlier ones on key collisions.

    :param config: iterable of dict-like items.
    :return: the merged dictionary.
    """
    # The original used `list(map(_config.update, config))`, building a
    # throwaway list purely for its side effects; an explicit loop is
    # the idiomatic form.
    merged = {}
    for item in config:
        merged.update(item)
    return merged
def get_model(model_name, in_channels = 3, input_size = 224, num_classes = 1000):
"""Get model
Args :
--model_name: model's name
--in_channels: default is 3
--input_size: default is 224
--num_classes: default is 1000 for ImageNet
return :
--model: model instan... | 9,453 |
def check_edge_heights(
stack, shifts, height_resistance, shift_lines, height_arr, MIN_H, MAX_H,
RESOLUTION
):
"""
Check all edges and output an array indicating which ones are
0 - okay at minimum pylon height, 2 - forbidden, 1 - to be computed
NOTE: function not used here! only for test purpose... | 9,454 |
def linear_int_ext(data_pts, p, scale=None, allow_extrap=False):
"""
Interpolate data points to find remaining unknown values absent from
`p` with optionally scaled axes. If `p` is not in the range and
`allow_extra` == True, a linear extrapolation is done using the two data
points at the end corresp... | 9,455 |
def eval_in_els_and_qp(expression, ig, iels, coors,
fields, materials, variables,
functions=None, mode='eval', term_mode=None,
extra_args=None, verbose=True, kwargs=None):
"""
Evaluate an expression in given elements and points.
Parameter... | 9,456 |
def _add_tokenization_exceptions(language):
"""
Tons of tokenization exceptions for this dataset
:param language:
:return:
"""
#N2C2 2019 and Share 2013 Concept Normalization
language.tokenizer.add_special_case('empiricvancomycin', [{ORTH: "empiric"}, {ORTH: "vancomycin"}])
language.tok... | 9,457 |
def analyze_disc_learning(logpath, figpath=None, show=False, **kwds):
"""
Plot learning curves of a discriminator loaded from `logpath`.
"""
disc = load_disc_log(logpath)
fig = disc.plot_all(**kwds)
fig.tight_layout()
if show:
pyplot.show()
if figpath:
fig.savefig(figpath... | 9,458 |
def tracks2Dataframe(tracks):
"""
Saves lsit of Track objects to pandas dataframe
Input:
tracks: List of Track objects
Output:
df: Pandas dataframe
"""
if(len(tracks) == 0):
print("Error saving to CSV. List of tracks is empty")
return
#... | 9,459 |
def sift_point_to_best(target_point, point, sift_dist):
"""
Move a point to target point given a distance. Based on Jensen's inequality formula.
Args:
target_point: A ndarray or tensor, the target point of pca,
point: A ndarray or tensor, point of pca,
sift_dist: A float, distance w... | 9,460 |
def jrandom_counts(sample, randoms, j_index, j_index_randoms, N_sub_vol, rp_bins, pi_bins,
period, num_threads, do_DR, do_RR):
"""
Count jackknife random pairs: DR, RR
"""
if do_DR is True:
DR = npairs_jackknife_xy_z(sample, randoms, rp_bins, pi_bins, period=period,
jtags1=j... | 9,461 |
def cli():
    """Calliope: a multi-scale energy systems (MUSES) modeling framework"""
    # Intentionally empty: this is the root CLI entry point; subcommands
    # are attached elsewhere. The docstring doubles as the CLI help text,
    # so it must not be reworded casually.
    pass
def process_not_inferred_array(ex: pa.ArrowInvalid, values: Any) -> pa.Array:
"""Infer `pyarrow.array` from PyArrow inference exception."""
dtype = process_not_inferred_dtype(ex=ex)
if dtype == pa.string():
array: pa.Array = pa.array(obj=[str(x) for x in values], type=dtype, safe=True)
else:
... | 9,463 |
def start_end(tf):
"""Find start and end indices of running streaks of True values"""
n = len(tf)
tf = np.insert(tf, [0, len(tf)], [False, False])
# 01 and 10 masks
start_mask = (tf[:-1] == 0) & (tf[1:] == 1)
end_mask = (tf[:-1] == 1) & (tf[1:] == 0)
# Locations
start_loc = np.whe... | 9,464 |
def open_with_lock(fpath, mode="rb+", timeout=None, **kwargs):
"""Open file with lock."""
if timeout is None:
timeout = constants.lock_timeout
with Lock(fpath, mode, timeout=timeout, **kwargs) as file_handle:
try:
yield file_handle
finally:
file_handle.flush()... | 9,465 |
def add_metadata_from_dis_file(dis_file, res):
"""
This function parses the .dis file and populates relevant metadata terms for the MODFLOWModel-
InstanceResource object being passed. Data from the .dis file is used to populate portions of
the StressPeriod, GridDimensions, and StudyArea terms. Infor... | 9,466 |
def get_posts(positive_tags: List[str], negative_tags: List[str]=None) -> Iterable[Post]:
"""Retrieve all post data that contains and doesn't contain certain tags.
Args:
positive_tags: The tags that the posts retrieved must contain.
negative_tags: Optional, blacklisted tags.
Yields:
... | 9,467 |
def remove_provinces(data, date_range):
"""
REMOVE PROVINCES
:param data: The Data received from the API
:param date_range: the date range of the data
:return: data after removing provinces
"""
countries_with_provinces = []
names_of_countries_with_prov = []
# get countries with prov... | 9,468 |
def testDisabled(component):
"""
Tests whether a component is enabled.
Parameters
----------
component: Component
The component used for testing
"""
if "disabled" not in component.params:
return
if component.params['disabled'].val:
alert(4305, component, strFiel... | 9,469 |
def face_detection() -> None:
"""Initiates face recognition script and looks for images stored in named directories within ``train`` directory."""
support.flush_screen()
train_dir = 'train'
os.mkdir(train_dir) if not os.path.isdir(train_dir) else None
speaker.speak(text='Initializing facial recognit... | 9,470 |
def plot_publish(families, targets=None, identifiers=None, keys=None):
"""Parse and plot all plugins by families and targets
Args:
families (list): List of interested instance family names
targets (list, optional): List of target names
identifiers (list, optional): List of interested di... | 9,471 |
def get_job_view(execution, prev_execution, stackstorm_url):
"""
Gets a job view from the specified execution and previous execution
:param execution: dict
:param prev_execution: dict
:param stackstorm_url: string
:return: dict
"""
current_time = datetime.datetime.utcnow()
hash_code... | 9,472 |
def mdetr_resnet101_refcocoplus(pretrained=False, return_postprocessor=False):
"""
MDETR R101 with 6 encoder and 6 decoder layers.
Trained on refcoco+, achieves 79.52 val accuracy
"""
model = _make_detr("resnet101")
if pretrained:
checkpoint = torch.hub.load_state_dict_from_url(
... | 9,473 |
def launch_duo_report(related_genome_id, duo_relation, duo_affected,
proband_genome_id, proband_sex, score_indels,
accession_id):
"""Launch a family report. Return the JSON response.
"""
# Construct url and request
url = "{}/reports/".format(FABRIC_API_URL)
... | 9,474 |
def readConfigFile(filePath):
""" Read the config file and generate a dictionnary containing an entry for
every modules of the installation. """
modules_attributes_list = []
confFile = open(filePath, "r")
for i, line in enumerate(confFile.readlines()):
# Remove everything that is written after "#" character ... | 9,475 |
def test_date_rounding():
""" https://github.com/SheetJS/ssf/issues/32 """
dt = 4018.99999998843
cases = [("mm/dd/yyyy hh:mm:ss.000", "12/31/1910 23:59:59.999"),
("mm/dd/yyyy hh:mm:ss.00", "01/01/1911 00:00:00.00"),
("mm/dd/yyyy hh:mm:ss.0", "01/01/1911 00:00:00.0"),
(... | 9,476 |
def unpad_pkcs7(data):
"""
Strips PKCS#7 padding from data.
Raises ValueError if padding is invalid.
"""
if len(data) == 0:
raise ValueError("Error: Empty input.")
pad_value = data[-1]
if pad_value == 0 or pad_value > 16:
raise ValueError("Error: Invalid padding.")
for i ... | 9,477 |
def create_output_channel(
mgr: sl_tag.TagManager, group: str, name: str, data_type: sl_tag.DataType
) -> sl_tag.TagData:
"""Create a FlexLogger output channel."""
# "Import" the channel into FlexLogger.
full_name = get_tag_prefix() + ".Import.Setpoint.{}.{}".format(group, name)
mgr.open(full_name, ... | 9,478 |
def slugify(value, allow_unicode=False):
"""
adapted from https://github.com/django/django/blob/master/django/utils/text.py
Convert to ASCII if 'allow_unicode' is False. Convert spaces or repeated
dashes to single dashes. Remove characters that aren't alphanumerics,
underscores, or hyphens. Convert ... | 9,479 |
def _checkwavelet(wavelet):
    """Validate that *wavelet* names a known discrete pywt wavelet.

    Raises ValueError when the name is not in ``pywt.wavelist``.
    """
    valid_names = pywt.wavelist(kind='discrete')
    if wavelet in valid_names:
        return
    raise ValueError("'%s' not in family set = %s" % (wavelet, valid_names))
def data_process(raw_text_iter: dataset.IterableDataset) -> Tensor:
    """Converts raw text into a flat Tensor."""
    tensors = (torch.tensor(vocab(tokenizer(item)), dtype=torch.long)
               for item in raw_text_iter)
    non_empty = [t for t in tensors if t.numel() > 0]
    return torch.cat(tuple(non_empty))
def rna_view_redirect(request, upi, taxid):
    """Permanently redirect the combined ``urs_taxid`` URL form to the
    ``urs/taxid`` view for the same sequence and taxon.
    """
    return redirect('unique-rna-sequence', permanent=True, upi=upi,
                    taxid=taxid)
def skip_for_variants(meta: MetaData, variant_keys: AbstractSet[str]) -> bool:
"""Check if the recipe uses any given variant keys
Args:
meta: Variant MetaData object
Returns:
True if any variant key from variant_keys is used
"""
# This is the same behavior as in
# conda_build.metad... | 9,483 |
def _is_safe_url(url, request):
"""Override the Django `is_safe_url()` to pass a configured list of allowed
hosts and enforce HTTPS."""
allowed_hosts = (
settings.DOMAIN,
urlparse(settings.EXTERNAL_SITE_URL).netloc,
)
require_https = request.is_secure() if request else False
retu... | 9,484 |
def bootstrap():
    """Initialize all the infrastructure for the app.

    Currently this only wires up the ORM mappers.

    :return: None
    """
    start_mappers()
def test_full_pipeline() -> None:
"""Test the full pipeline."""
# Define a class that can send messages and one that can receive them.
class TestClassS:
"""Test class incorporating send functionality."""
msg = _TestMessageSenderBoth()
def __init__(self, target: Union[TestClassRSyn... | 9,486 |
def bn_calibration_init(m):
""" calculating post-statistics of batch normalization """
if getattr(m, 'track_running_stats', False):
# reset all values for post-statistics
m.reset_running_stats()
# set bn in training mode to update post-statistics
m.training = True
# if us... | 9,487 |
def use_blackontrans_style():
    """Activate the packaged ``blackontrans`` matplotlib style."""
    style_path = resource_filename("pynba", "blackontrans.mplstyle")
    plt.style.use(style_path)
def sanitize_file_lines(file):
"""Enumerate a line iterator and returns the pairs of (line number, line) that are cleaned.
:param iter[str] file: An iterable over the lines in a BEL Script
:rtype: iter[tuple[int,str]]
"""
line_iterator = sanitize_file_line_iter(file)
for line_number, line in l... | 9,489 |
def _server():
"""
Reconstitute the name of this Blueprint I/O Server.
"""
return urlparse.urlunparse((request.environ.get('wsgi.url_scheme',
'https'),
request.environ.get('HTTP_HOST',
... | 9,490 |
def get_range_api(spreadsheetToken, sheet_id, range, valueRenderOption=False):
"""
该接口用于根据 spreadsheetToken 和 range 读取表格单个范围的值,返回数据限制为10M。
:return:
"""
range_fmt = sheet_id + '!' + range
get_range_url = cfg.get_range_url.format(spreadsheetToken=spreadsheetToken, range=range_fmt)
headers = {
... | 9,491 |
def rotate_rboxes90(rboxes: tf.Tensor,
image_width: int,
image_height: int,
rotation_count: int = 1) -> tf.Tensor:
"""Rotate oriented rectangles counter-clockwise by multiples of 90 degrees."""
image_width = tf.cast(image_width, dtype=tf.float32)
image_h... | 9,492 |
def get_wrong_user_credentials():
"""
Monkeypatch GithubBackend.get_user_credentials to force the case where
invalid credentias were provided
"""
return dict(username='invalid',
password='invalid',
token='invalid',
remember=False,
remem... | 9,493 |
def get_points(coords, m, b=None, diagonal=False):
"""Returns all discrete points on a line"""
points = []
x1, y1, x2, y2 = coords[0], coords[1], coords[2], coords[3]
# vertical line
if m is np.nan:
# bottom to top
y = min(y1, y2)
while y <= max(y1, y2):
points.a... | 9,494 |
def get_region_geo(region_id):
"""Get Geo/TopoJSON of a region.
Args:
region_id (str): Region ID (e.g. LK-1, LK-23)
Returns:
Geo-spatial data as GeoPandasFrame
"""
region_type = get_entity_type(region_id)
region_to_geo = _get_region_to_geo(region_type)
return region_to_geo... | 9,495 |
def load_vocabulary(f):
"""
Load the vocabulary from file.
:param f: Filename or file object.
:type f: str or file
:return: Vocabulary
"""
v = Vocabulary()
if isinstance(f, str):
file_ = open(f, 'r')
else:
file_ = f
for line in file_:
wordid, word, word... | 9,496 |
def populate_objects(phylodata_objects, project_name, path_to_species_trees, path_to_gene_trees, path_to_ranger_outputs):
"""
this function will try and associate each phylodata object with the correct
species_besttree
gene_bootstrap_trees
and rangerDTL output files (if they exist)
args:
... | 9,497 |
def extract_peers_dataset(
work_dict,
scrub_mode='sort-by-date'):
"""extract_peers_dataset
Fetch the IEX peers data for a ticker and
return it as a pandas Dataframe
:param work_dict: dictionary of args
:param scrub_mode: type of scrubbing handler to run
"""
label = work_dic... | 9,498 |
def check_pipeline_can_be_updated(client, version_name, pipeline_name, project_name, data):
"""
Check if pipeline can be updated:
- If desired input/output is changed; check if other versions exists (besides current version)
:param ubiops.CoreApi client: the core API client to make requests to the API
... | 9,499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.