content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def get_work_log_queue():
""" json格式为::
{'func':'transform',
'kw':{ ... # 和前面task_queue相同
},
"runtime":{ # 队列运行相关信息
'created':12323423 #进入原始队列时间
'queue':'q01' # 是在哪个原子原子队列
'start':123213123 #转换开始时间
'end':123213123 #转换结束时间
'worker':'w01... | 26b2e3c73f7dd05b44659d3a02ca8d2b8205057e | 9,400 |
def is_first_buy(ka, ka1, ka2=None, pf=False):
"""确定某一级别一买
注意:如果本级别上一级别的 ka 不存在,无法识别本级别一买,返回 `无操作` !!!
一买识别逻辑:
1)必须:上级别最后一个线段标记和最后一个笔标记重合且为底分型;
2)必须:上级别最后一个向下线段内部笔标记数量大于等于6,且本级别最后一个线段标记为底分型;
3)必须:本级别向下线段背驰 或 本级别向下笔背驰;
4)辅助:下级别向下线段背驰 或 下级别向下笔背驰。
:param ka: KlineAnalyze
本级别
... | 5ea35d728f3ddfaa5cff09a2e735c480f1e3c622 | 9,401 |
def preprocess(path, l_pass=0.7, h_pass=0.01, bandpass=True, short_ch_reg=False, tddr=True, negative_correlation=False, verbose=False, return_all=False):
"""
Load raw data and preprocess
:param str path: path to the raw data
:param float l_pass: low pass frequency
:param float h_pass: high pass freq... | 01d508de322fa007886e34838911d2cccea79aab | 9,402 |
def geomapi_To2d(*args):
"""
* To intersect a curve and a surface. This function builds (in the parametric space of the plane P) a 2D curve equivalent to the 3D curve C. The 3D curve C is considered to be located in the plane P. Warning The 3D curve C must be of one of the following types: - a line - a circle - a... | 7a8a6436f364e933d71ba8fb47617f01b0e13b47 | 9,403 |
import yaml
def get_object_list():
"""Returns the object name list for APC2015.
Args:
None.
Returns:
objects (list): List of object name.
"""
pkg_path = rospkg.RosPack().get_path(PKG)
yaml_file = osp.join(pkg_path, 'data/object_list.yml')
with open(yaml_file) as f:
... | 7fd1268ef8804eb394a42a6b2fdc9fc223cd4316 | 9,404 |
def gtMakeTAKBlobMsg(callsign, text, aesKey=False):
"""
Assemble an ATAK plugin compatible chat message blob
(suitable for feeding to gtMakeAPIMsg() )
With optional AES encryption, if a key is provided
"""
body = (callsign + b': ' + text)[:230]
# Apply optional encryption (and base64 encod... | ecc562e92a72a0a6e0d5cc45563d1c89962d931b | 9,405 |
import re
def validate_json_with_extensions(value, rule_obj, path):
""" Performs the above match, but also matches a dict or a list. This it
just because it seems like you can't match a dict OR a list in pykwalify
"""
validate_extensions(value, rule_obj, path)
if not isinstance(value, (list, dict... | ef4d5744adf0c2d3ca326da66cbe608b306a2ca3 | 9,406 |
def artists_by_rating(formatter, albums):
"""Returns the artists sorted by decreasing mean album rating.
Only artists with more than 1 reviewed albums are considered.
"""
artist_tags = set([album["artist_tag"] for album in albums])
artists = []
# build the list of artists and compute their ratin... | fdf443973b4187650d95f76f8cde2a61ea7a1a3f | 9,407 |
def st_max(*args):
"""Max function.
Parameters
----------
x : float, int, MissingValue instance, or None
(2 or more such inputs allowed)
Returns
-------
max(x1, x2, ...) if any x is non-missing (with missing values ignored).
Otherwise, MISSING (".") returned.
"... | 978cab7522250541890c723fcf33d2ded9539293 | 9,408 |
def is_button_controller(device: Device) -> bool:
    """Return true if the device is a stateless button controller."""
    button_capabilities = (
        CAP_PUSHABLE_BUTTON,
        CAP_HOLDABLE_BUTTON,
        CAP_DOUBLE_TAPABLE_BUTTON,
    )
    # A device counts as a button controller if it exposes any one of
    # the push / hold / double-tap capabilities.
    return any(cap in device.capabilities for cap in button_capabilities)
import numpy as np
import os
import nibabel as nb
def gen_acq_noddi(in_file, epi_params, alt_epi_params, readout, readout_alt):
"""
This is a function to generate the FSL topup acq.txt file
:param in_file:
:param epi_params:
:param alt_epi_params:
:param readout:
:param readout_alt:
:r... | 3201c1faeb1842ad4b2dd3d903d1c103572375cf | 9,410 |
def list_parts(bucket, key, upload_id):
"""Lists the parts that have been uploaded for a specific multipart upload.
This operation must include the upload ID, which you obtain by
sending the initiate multipart upload request (see
CreateMultipartUpload ). This request returns a maximum of 1,000
uplo... | eb343e071ce72ea326fc479934984fdff425dfec | 9,411 |
def leap_year():
"""
This functions seeks to return a leap year after user input << integer(4).
Rules for a leap year:
As you surely know, due to some astronomical reasons, years may be leap or common.
The former are 366 days long, while the latter are 365 days long.
Since the introduction of t... | 5cf459514ce768c1cf633fdddab5f986004bc1c8 | 9,412 |
import math
def parse(files, **kwargs):
"""Parse all BAM files."""
parsed = []
if kwargs["meta"].has_field("base_coverage"):
cov_range = kwargs["meta"].field_meta("base_coverage")["range"]
else:
cov_range = [math.inf, -math.inf]
if kwargs["meta"].has_field("read_coverage"):
... | c12b068f2a32052cbaa583a4704f86c25e577947 | 9,413 |
def login(request):
"""Login view for GET requests."""
logged_in = request.authenticated_userid is not None
if logged_in:
return {'logged_in': True,
'form_enabled': False,
'status': u'Already logged in',
'status_type': u'info'}
status = u''
s... | 8cab36d8d059d0683ef2e84a40cca5c99a27c6fc | 9,414 |
def of_type(_type, value_1, *args) -> bool:
"""
Check if a collection of values are of the same type.
Parameters:
_type (any): The type to check for.
value_1 (any): The first value to check.
*args (any): Rest of values to check against given type.
Return... | eab1e70655ff74b1cbfc338a893719b7f0681f4a | 9,415 |
import os
import numpy
def configuration(parent_package='', top_path=None):
"""[Placeholder].
Parameters
----------
parent_package :
top_path :
Returns
-------
configuration :
"""
build_path = build_mlpack()
config = Configuration('mlpack', parent_package, top_path)
... | 9724f21048071b0b6d7d213d943da77569514349 | 9,416 |
import yaml
def read_config(path):
"""
Reads the Kong config file (YAML).
"""
if path is None:
raise Exception(
"empty path provided. please provide a path using `--config=<config.yml>`"
)
with open(path, "r") as stream:
try:
return yaml.safe_load(st... | 343fabb8fa1c4cc78ace63466c864e50cf5dc974 | 9,417 |
def generate_grid_world(grid, prob, pos_rew, neg_rew, gamma=.9, horizon=100):
"""
This Grid World generator requires a .txt file to specify the
shape of the grid world and the cells. There are five types of cells: 'S' is
the starting position where the agent is; 'G' is the goal state; '.' is a
norma... | 753fa30327f2dddfb4a459fbb40e842b28b0eda8 | 9,418 |
def sqrt_quadrature_scheme(N_poly, N_poly_log):
    """ Returns quadrature rule that is exact on 0^1 for
    p(x) + q(x)sqrt(x) for deg(p) <= N_poly and deg(q) <= N_poly_sqrt.
    """
    pts, wts = sqrt_quadrature_rule(N_poly, N_poly_log)
    return QuadScheme1D(pts, wts)
def check_dataset_update(args, dataset):
"""Checks if the dataset information must be updated.
"""
return (args.dataset_attributes or
args.import_fields or
(args.shared_flag and r.shared_changed(args.shared, dataset)) or
(((hasattr(args, 'max_categories') and args.max_ca... | 005700a0d544333f018ec423a6e3d287ab982553 | 9,420 |
from typing import Dict
from typing import List
import json
def get_package_extras(provider_package_id: str) -> Dict[str, List[str]]:
"""
Finds extras for the package specified.
:param provider_package_id: id of the package
"""
if provider_package_id == 'providers':
return {}
with ope... | 15ac01740e60d2af73458b7ef46330708831a0ca | 9,421 |
def e(a: float, b: float) -> float:
    """
    e = sqrt(1 + (b * b) / (a * a))

    NOTE(review): this is the hyperbola eccentricity formula; an
    ellipse would use sqrt(1 - b^2/a^2) — confirm the intended conic.

    :param a: semi-major axis
    :type a: float
    :param b: semi-minor axis
    :type b: float
    :return: eccentricity
    :rtype: float
    """
    ecc_squared = 1 + (b * b) / (a * a)
    return np.sqrt(ecc_squared)
from pathlib import Path
import sh
def parse_note(path: Path) -> dict:
""" convert note in plain text to a dictionary.
Line #1 ~ #5 are meta data of the note.
Line #9 to end is the body.
"""
header_line_number = 5
body_start_line = 9
res = {}
with open(path) as f:
for x... | 792f4bace60fa52b1a7cbeeaf0dabd881ffd4a24 | 9,423 |
def get_previous_sle_for_warehouse(last_sle, exclude_current_voucher=False):
"""get stock ledger entries filtered by specific posting datetime conditions"""
last_sle['time_format'] = '%H:%i:%s'
if not last_sle.get("posting_date"):
last_sle["posting_date"] = "1900-01-01"
if not last_sle.get("pos... | 7fdc0db05564cc54555784c474c7bc4cb33e280a | 9,424 |
import networkx as nx
def forest_str(graph, with_labels=True, sources=None, write=None, ascii_only=False):
"""
Creates a nice utf8 representation of a directed forest
Parameters
----------
graph : nx.DiGraph | nx.Graph
Graph to represent (must be a tree, forest, or the empty graph)
w... | 3486545035b9c2a8954102bdb92ebe9dd7b1fa24 | 9,425 |
import copy
def rotated_shower(shower, alt, az):
"""
Return a rotated shower object from a shower object and a direction (alt, az)
Parameters
----------
shower: shower class object
Returns
-------
copy of the given shower but rotated
"""
rot_shower = copy(shower)
rot_showe... | d420c408083a54837c87db405a8d65abfe46a5f8 | 9,426 |
def angle2circle(angles):
    """Convert angles in degrees to radians, doubled (after a +7.5 degree shift)."""
    shifted = np.array(angles) + 7.5
    return np.deg2rad(2 * shifted)
def _make_source(cls_source: str, cls_name: str, instance_method: str):
"""Converts a class source to a string including necessary imports.
Args:
cls_source (str): A string representing the source code of a user-written class.
cls_name (str): The name of the class cls_source represents.
... | 105ca5d34c0de2bfc81937aaaf14b4d610eaa35a | 9,428 |
def prepend_pass_statement(line: str) -> str:
    """Prepend pass at indent level and comment out the line."""
    indent_width = num_indented(line)
    remainder = line[indent_width:]
    return " " * indent_width + "pass  # " + remainder
from datetime import datetime
import time
def download_spot_by_dates(start=datetime(2011, 1, 1)):
"""
下载数据,存储为csv文件
:param start: 2011-01-01 最早数据
:return: True 下载文件 False 没有下载文件
"""
file_index = get_download_file_index(SPREAD_DIR, start=start)
if file_index.empty:
return False
... | 34574d4cd5d1985850fe681c3e5e4f6a3ebdc1a4 | 9,430 |
def truncate_range(data, percMin=0.25, percMax=99.75, discard_zeros=True):
"""Truncate too low and too high values.
Parameters
----------
data : np.ndarray
Image to be truncated.
percMin : float
Percentile minimum.
percMax : float
Percentile maximum.
discard_zeros : ... | a273db14c8f651dcbdaa39825e1150bd0cdc119b | 9,431 |
async def payment_list(request):
"""
---
description: Show outgoing payments, regarding {bolt11} or {payment_hash} if set Can only specify one of {bolt11} or {payment_hash}
tags:
- payments
produces:
- application/json
parameters:
- in: body
name: body
required: false
... | 3a4fe428adb10dd53e9b2564fea59cdc4b7c87ff | 9,432 |
import io
def write_opened(dir, file_dict, data_dict, verbose=True):
"""
read in dictionary with open files as values
and write data to files
"""
for game_id, vals in data_dict.items():
f = file_dict.get(game_id)
if not f:
fn = dir + str(game_id) + ".csv"
f... | eb3ac9b95b70df31eb1ea24b94b5e416966b7bc5 | 9,433 |
def get_accessible_cases(item, user):
    """Return all accessible for a cohort and user."""
    # Look the method up by name, then invoke it with the user.
    accessor = getattr(item, "get_accessible_cases_for_user")
    return accessor(user)
def aux_conv5(A, B, n, idx):
"""
Performs the convolution of A and B where B = A* (enumerate-for-loop)
:param A: Coefficients matrix 1 (orders, buses)
:param B: Coefficients matrix 2 (orders, buses)
:param c: last order of the coefficients in while loop
:param indices: bus indices array
:ret... | 0acaece3da86ac578672b7ab7e0f506117e752d3 | 9,435 |
def plot_phaseogram(phaseogram, phase_bins, time_bins, unit_str='s', ax=None,
**plot_kwargs):
"""Plot a phaseogram.
Parameters
----------
phaseogram : NxM array
The phaseogram to be plotted
phase_bins : array of M + 1 elements
The bins on the x-axis
time_bi... | b7a3b8aa0cf6a16e67e3d5059049082b6d308d7e | 9,436 |
def load_rapidSTORM_track_header(path):
"""
Load xml header from a rapidSTORM (track) single-molecule localization file and identify column names.
Parameters
----------
path : str, bytes, os.PathLike, file-like
File path for a rapidSTORM file to load.
Returns
-------
list of st... | 584baa4bd0a634608bb2c254314ad80a9c7650de | 9,437 |
def hex_to_byte(hexStr):
    """Convert a hex string into a string of byte characters.

    Spaces in the input are ignored, so ``"41 42"`` and ``"4142"`` are
    equivalent. Returns a ``str`` whose characters are ``chr()`` of each
    decoded byte (kept for backward compatibility with callers).

    :param hexStr: hexadecimal digits, optionally space-separated
    :return: decoded string of byte characters
    """
    # Strip spaces only (not all whitespace), matching the historical behavior.
    hexStr = ''.join(hexStr.split(" "))
    # Decode two hex digits at a time. Fixed: the old accumulator was
    # named `bytes`, shadowing the builtin; the append loop is now a
    # comprehension.
    chars = [chr(int(hexStr[i:i + 2], 16)) for i in range(0, len(hexStr), 2)]
    return ''.join(chars)
def _vital_config_update(cfg, cfg_in):
"""
Treat a vital Config object like a python dictionary
Args:
cfg (kwiver.vital.config.config.Config): config to update
cfg_in (dict | kwiver.vital.config.config.Config): new values
"""
# vital cfg.merge_config doesnt support dictionary input
... | 35a0092013229f3b71a1ba06bbb660f861ef391c | 9,439 |
def SubscriberReceivedStartEncKeyVector(builder, numElems):
    """This method is deprecated. Please switch to Start.

    Thin backward-compatibility alias that delegates to the module-level
    ``StartEncKeyVector`` (NOTE(review): the naming pattern suggests
    flatbuffers-generated code — confirm before removing this shim).
    """
    return StartEncKeyVector(builder, numElems)
import struct
def _read_extended_field_value(value, rawdata):
"""Used to decode large values of option delta and option length
from raw binary form."""
if value >= 0 and value < 13:
return (value, rawdata)
elif value == 13:
return (rawdata[0] + 13, rawdata[1:])
elif value == 14:... | 12a1f665f133f6ea5ffc817bf69ec0a9e0e07dbc | 9,441 |
def add_uint(a, b):
    """Returns the sum of two uint256-ish tuples."""
    lhs = from_uint(a)
    rhs = from_uint(b)
    return to_uint(lhs + rhs)
def get_dcgan_args(parser, args=[]):
"""
parameters determing the DCGAN parameters
"""
# DCGAN:
# ------------------------------------------------------------------------
parser.add_argument(
"--lam", type=float, default=10, help="Factor for scaling gradient penalty"
)
parser.add... | 28d00721fad62ecbc381190b05d81fe578860f8e | 9,443 |
import os
def _gen_span_id() -> str:
"""Return 16 random hexadecimal digits.
The id is used for distributed tracing.
"""
return os.urandom(8).hex() | 4c70028da278eb26c947c9ca24e0c527f6744860 | 9,444 |
from pathlib import Path
def store_tabular_data(filepath: Path, use_stem: bool = True) -> None:
"""Reads the tabular data from filepath and stores it in-memory to be plotted asychronously.
Args:
filepath (Path): The tabular data file to be read and stored.
use_stem (bool, optional): Only stor... | 98c1c74aefe855690ad67ba0c6f09bd574c877ce | 9,445 |
import pkgutil
import io
def load_uci_credit_card(return_X_y=False, as_frame=False):
"""Loads the UCI Credit Card Dataset.
This dataset contains a sample of [Default of Credit Card Clients Dataset](https://www.kaggle.com/uciml/default-of-credit-card-clients-dataset).
Example:
```python
from sko... | ae388efcf82e0e6ff5be40ff5293d0b23d474735 | 9,446 |
def quad_lsq(x, y, verbose=False, itmax=200, iparams=[]):
"""
Fits a parabola to the data, more handy as it fits for
parabola parameters in the form y = B_0 * (x - B_1)**2 + B_2.
This is computationally slower than poly_lsq, so beware of its usage
for time consuming operations. Uses scipy odrpack, b... | 02dda2ba78ac6754b913941f2204ef4aa26d3f36 | 9,447 |
import os
def find_file(directory_name, cyclone_id_string, prefer_zipped=True,
allow_other_format=True, raise_error_if_missing=True):
"""Finds NetCDF file with SHIPS data.
:param directory_name: Name of directory with SHIPS data.
:param cyclone_id_string: Cyclone ID (must be accepted by
... | 4041340ecb9fc404eeabfb55a6732c5c4ede82be | 9,448 |
from typing import Tuple
import re
def _parse_cli_variable(mapping_str: str) -> Tuple[str, str]:
"""Checks that the input is of shape `name:value` and then splits it into a tuple"""
match = re.match(r"(?P<name>.+?):(?P<value>.+)", mapping_str)
if match is None:
raise ValueError(f'CLI variable inpu... | f701b7e85c45c2df35e1252721cd3215357909ba | 9,449 |
import json
def list_privileges_by_role(request, role):
"""
List sentry privilegs by role
:param request:
:param role: role name
:return: A Json array of SentryPrivileges: [p1, p2, p3...]
"""
sentry_privileges = _get_sentry_api(request.user).list_sentry_privileges_by_role("cdap", role)
sentry_privileg... | fbb488f6d55b3a51646bc0c74f4861677cc16912 | 9,450 |
from typing import Any
import torch
from typing import Union
def to_torch_as(x: Any, y: torch.Tensor) -> Union[Batch, torch.Tensor]:
    """Return an object without np.ndarray.
    Same as ``to_torch(x, dtype=y.dtype, device=y.device)``.
    """
    assert isinstance(y, torch.Tensor)
    # Borrow dtype and device from the reference tensor.
    target = {"dtype": y.dtype, "device": y.device}
    return to_torch(x, **target)
from pathlib import Path
def AllenAtlas(res_um=25, par=None):
"""
Instantiates an atlas.BrainAtlas corresponding to the Allen CCF at the given resolution
using the IBL Bregma and coordinate system
:param res_um: 25 or 50 um
:return: atlas.BrainAtlas
"""
if par is None:
# Bregma ind... | 0f8b55c075104ee39d42a0989a4006fe5f1ae617 | 9,452 |
def get_devices_properties(device_expr,properties,hosts=[],port=10000):
"""
Usage:
get_devices_properties('*alarms*',props,
hosts=[get_bl_host(i) for i in bls])
props must be an string as passed to Database.get_device_property();
regexp are not enabled!
get_matching_device_prop... | 58ba6fa32d4118b60ca9eadfde1a28d2a98854d3 | 9,453 |
def atexit_shutdown_grace_period(grace_period=-1.0):
"""Return and optionally set the default worker cache shutdown grace period.
This only affects the `atexit` behavior of the default context corresponding to
:func:`trio_parallel.run_sync`. Existing and future `WorkerContext` instances
are unaffected.... | f7440172f40b00069b149254a689521373dbded0 | 9,454 |
import os
import fnmatch
def combine_files(root, pattern=None):
"""Combine all files in root path directory
Parameters:
root (str) : file path to directory of files
pattern (str) : optional file pattern to search for in directory
Returns:
combined files
"""
if pattern is ... | cca93f9cd62245071d36dcc345ff4da02f0bbcc8 | 9,455 |
def get_point(points, cmp, axis):
""" Get a point based on values of either x or y axys.
:cmp: Integer less than or greater than 0, representing respectively
< and > singhs.
:returns: the index of the point matching the constraints
"""
index = 0
for i in range(len(points)):
if cmp <... | b59035d390e83b45a0131e28c4acf7e302cf3e45 | 9,456 |
import os
def benchmark_parser_header_16(nb_headers, nb_fields, do_checksum=False):
"""
This method generate the P4 program to benchmark the P4 parser
:param nb_headers: the number of generic headers included in the program
:type nb_headers: int
:param nb_fields: the number of fields (16 bits) in... | b7b89823768dd4779fad9d48142d5b469aeb14bb | 9,457 |
import pathlib
def create_jobs_list(chunks, outdir, *filters):
# TO DO
# Figure out the packing/unpacking
"""
Create a list of dictionaries that hold information for the given
chunks
Arguments:
chunks: list: A list of lists. Each nested list contains the
filepaths to be processed
... | 433992eb34bc1f80d12f8cdcee3dbd99d04d22c1 | 9,458 |
import torch
def per_symbol_to_per_seq_probs(per_symbol_probs, tgt_out_idx):
""" Gather per-symbol probabilities into per-seq probabilities """
# per_symbol_probs shape: batch_size, seq_len, candidate_size
# tgt_out_idx shape: batch_size, seq_len
# output shape: batch_size, 1
return torch.prod(
... | fc39ac129b8bbffcb602c73bc67fcc44b1d354ed | 9,459 |
def solve_game(payoffs):
""" given payoff matrix for a zero-sum normal-form game,
return first mixed equilibrium (may be multiple)
returns a tuple of numpy arrays """
# .vertex_enumeration()
# .lemke_howson(initial_dropped_label=0) - does not return *all* equilibrium
game = nash.Game(payoffs)
... | 9eb0dd84592f9a2d135c79322f6c812b775b0e74 | 9,460 |
from functools import reduce
def zone_features(df, zfeatures, aufeatures):
"""Create zone features from the data
Args:
df (DataFrame): Input dataframe
zfeatures (list): List of zone median features
aufeatures (list): List of zone autocorr features
Return: 2 dataframes
"""
... | fb055e1c2fea040c95422818fbd6d16a97bf873f | 9,461 |
from typing import List
def get_active_validator_indices(validators: [ValidatorRecord]) -> List[int]:
    """
    Gets indices of active validators from ``validators``.
    """
    active_indices = []
    for index, validator in enumerate(validators):
        if is_active_validator(validator):
            active_indices.append(index)
    return active_indices
def parse_decodes(sentences, predictions, lengths, label_vocab):
"""Parse the padding result
Args:
sentences (list): the tagging sentences.
predictions (list): the prediction tags.
lengths (list): the valid length of each sentence.
label_vocab (dict): the label vocab.
Retur... | bf40d8570e0a552853108e860fd193c0d9940e98 | 9,463 |
from datetime import datetime
def get_weekday(start_date, end_date, weekday_nums, repeat=None):
"""
获取一段时间范围内每个周天对应的日期
:param start_date:
:param end_date:
:param weekday_nums: list, 星期对应数字 0 ~ 6
:param repeat:
:return:
"""
sdate = datetime.datetime.strptime(start_date, date_patter... | 65e0495951647cbb6648a3a68d7fd2c7e1e2e88b | 9,464 |
def context_processor(target):
"""
Decorator that allows context processors with parameters to be assigned
(and executed properly) in a RequestContext
Example::
return render_to_response(
template_name,
context_instance=RequestContext(
request,
processors=[
... | 842395b29aedbfe23bb3332bf343b12d26519d97 | 9,465 |
def data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete(uuid, local_id): # noqa: E501
"""data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete
removes... | a3bc85df9fa77b210573058b640e47f41930ae0d | 9,466 |
import typing
import json
def decode_messages(fit_bytes: bytes) -> typing.List[typing.Dict]:
"""Decode serialized messages.
Arguments:
fit_bytes: Encoded messages
Returns:
Decoded messages
"""
messages = []
for line in fit_bytes.splitlines():
payload = json.loads(lin... | c56a805b5c2ffee3b48be7ae88ad6a91cddd4cc5 | 9,467 |
import os
def read_DELETE(msg, hosts):
"""Parse the DELETE request and send data to the response generator function
Args:
msg (String): The request message to parse
hosts (List): The array of hosts
Returns:
List: An array of information about the request, including status code,
... | 5fb54a512d34e4043125e3a5cbf89cf5b362a3d9 | 9,468 |
def iresnet101(pretrained=False, progress=True, **kwargs):
"""
Constructs the IResNet-101 model trained on Glint360K(https://github.com/deepinsight/insightface/tree/master/recognition/partial_fc#4-download).
.. note::
The required input size of the model is 112x112.
Args:
pretrained (b... | d986282b805de959cfa2d6707532d23f1c23c31b | 9,469 |
from typing import Dict
def get_full_jwt(user: User) -> Dict:
    """
    Get a full jwt response from the username and uid token.
    """
    # Mint both tokens for the identity, access first (fresh), then refresh.
    access = create_access_token(identity=user, fresh=True)
    refresh = create_refresh_token(identity=user)
    return {'access_token': access, 'refresh_token': refresh}
import re
from datetime import datetime
def GridSearch_Prophet(prophet_grid, metric='mape'):
"""
GridSearch tool to determine the optimal parameters for prophet
Args:
- prophet_grid: List of parameters. Enter it as list(ParameterGrid(prophet_grid)
- metric: String. Not used yet. May be us... | 324f6468109bfa52258d1ad6645692395be7859a | 9,471 |
def _check_max_features(importances, max_features):
"""Interpret the max_features value"""
n_features = len(importances)
if max_features is None:
max_features = n_features
elif isinstance(max_features, int):
max_features = min(n_features, max_features)
elif isinstance(max_feature... | 816daf9d99ac4ecd2d5024a3be63f793d7669e1f | 9,472 |
def map_blocks(func, data):
"""Curried version of Dask's map_blocks
Args:
func: the function to map
data: a Dask array
Returns:
a new Dask array
>>> f = map_blocks(lambda x: x + 1)
>>> f(da.arange(4, chunks=(2,)))
dask.array<lambda, shape=(4,), dtype=int64, chunksize=(2,)>
... | ab97911bb147ceb6d5350fcd16300926d2a89f8e | 9,473 |
from pathlib import Path
from typing import Optional
import os
def download_and_extract(
package: str,
directory: Path,
version: Optional[str] = None,
remove_after: bool = False
) -> Path:
"""Modified to allow avoiding removing files after.
Parameters
----------
packag... | a836a310894d1db38bccdfd5eb1388cb1acd78cd | 9,474 |
def premises_to_syllogism(premises):
    """
    Encode two premises (mood letter + term pair) as a syllogism code.

    >>> premises_to_syllogism(["Aab", "Ebc"])
    'AE1'
    """
    # The figure is determined by the concatenated term pairs.
    figure_table = {"abbc": "1", "bacb": "2", "abcb": "3", "babc": "4"}
    major, minor = premises
    figure = figure_table[major[1:] + minor[1:]]
    return major[0] + minor[0] + figure
import re
def fix_fits_keywords(header):
"""
Update header keyword to change '-' by '_' as columns with '-' are not
allowed on SQL
"""
new_header = {}
for key in header.keys():
new_key = key.replace('-', '_')
new_header[new_key] = header[key]
# Temporary fix - needs to be ... | 0d8a2f502252051857a131944a4c31ba8ec9ff0e | 9,476 |
def request_password(email: str, mailer: Mailer, _tn: Translator):
"""
Create new hashed password and send mail..
:param email: Mail-address which should be queried
:param mailer: pyramid Mailer
:param _tn: Translator
:return: dict with info about mailing
"""
db_user = DBDiscussionSessi... | 09c9cbc164fc43fd953a3197c03cd7c27d758dba | 9,477 |
def is_sum_lucky(x, y):
"""This returns a string describing whether or not the sum of input is lucky
This function first makes sure the inputs are valid and then calculates the
sum. Then, it will determine a message to return based on whether or not
that sum should be considered "lucky"
"""
if x... | 081b5e8cc2657a00ea160e398fb00f84187e2ab6 | 9,478 |
import asyncio
def unsync_function(func, *args, **kwargs):
    """Runs an async function in a standard blocking way and returns output"""
    coroutine = func(*args, **kwargs)
    return asyncio.run(coroutine)
from typing import Iterable
from typing import Union
from typing import List
from typing import Tuple
from typing import Any
from typing import Dict
def zip_results(name: str, recipes: Iterable[Recipe], cache=CacheType.Auto) \
-> Recipe[Union[List[Tuple[Any, ...]], Dict[Any, Tuple[Any, ...]]]]:
"""
Cr... | a1e0b7aa2d5071e485f49b0b7aa43343f8760ab2 | 9,480 |
def get_muscle_reference_dictionary():
"""
The
@article{bashkatov2011optical,
title={Optical properties of skin, subcutaneous, and muscle tissues: a review},
author={Bashkatov, Alexey N and Genina, Elina A and Tuchin, Valery V},
journal={Journal of Innovative Op... | b2bcedabce6309a11d0b1f8424ccefc06d7c8dee | 9,481 |
from typing import Optional
from typing import Tuple
def flake8_entrypoint(physical_line: str) -> Optional[Tuple[int, str]]:
"""Flake8 plugin entrypoint that operates on physical lines."""
match = RX_TODO_OR_ELSE.search(physical_line)
if match:
by = match.group(2)
pact = match.group(3).str... | e8e672f50f0f58842cbdd7d1e599a4df5b9e1be0 | 9,482 |
import shlex
def smartquotes(text):
"""
Runs text through pandoc for smartquote correction.
This script accepts a paragraph of input and outputs typographically correct
text using pandoc. Note line breaks are not retained.
"""
command = shlex.split('pandoc --smart -t plain')
com = Popen(... | bab6ec252495d8e279cdcde7f51f60331117bae2 | 9,483 |
def get_nearest_stations_xy(x, y, variable, n=1, stations=None, ignore=None):
"""find the KNMI stations that measure 'variable' closest to the
x, y coordinates
Parameters
----------
x : int or float
x coordinate in RD
y : int or float
x coordinate in RD
variable : str
... | 2d19e64054eb0813919e2a286c686b91e6d0a6f4 | 9,484 |
def parseStdInput():
    """Obtain a graph by parsing the standard input
    as per the format specified in the PACE Challange.

    NOTE(review): the body below never reads stdin — it returns a
    hard-coded 4-cycle, so this looks like a placeholder/stub; confirm
    whether the real PACE parser lives elsewhere before relying on it.
    """
    # Hard-coded 4-cycle on vertices 1..4 (no input is consumed).
    edges = [(1,2),(2,3),(3,4),(4,1)]
    G = nx.Graph()
    G.add_edges_from(edges)
    return G
def strfdelta(tdelta, fmt):
""" Get a string from a timedelta.
"""
f, d = Formatter(), {}
l = {"D": 86400, "H": 3600, "M": 60, "S": 1}
k = list(map(lambda x: x[1], list(f.parse(fmt))))
rem = int(tdelta.total_seconds())
for i in ("D", "H", "M", "S"):
if i in k and i in l.keys():
... | 01e7d3678cc88a08ec91e64dd59037294f17d9fe | 9,486 |
from pathlib import Path
def get_file_list_from_dir(parent_dir: Path, file_mask: str = "*") -> list:
"""
Recursively gets a list of files in a Path directory with the specified name mask
and return absolute string paths for files
"""
get_logger(__name__).debug("Iterating for files in '{}'".format(... | 16a4b89751343ea0e8472160ef376ae008819a81 | 9,487 |
def imputation_Y(X, model):
"""Perform imputation. Don't normalize for depth.
Args:
X: feature matrix from h5.
model: a trained scBasset model.
Returns:
array: a peak*cell imputed accessibility matrix. Sequencing depth
isn't corr... | 75e2de758c3544655d4332098d4398255770d7c3 | 9,488 |
def format_percent(x, _pos=None):
    """Format a fraction as a LaTeX percent label.

    Usage: plt.gca().yaxis.set_major_formatter(format_percent)
    """
    pct = 100 * x
    # Show one decimal place only when the value is not (nearly) whole.
    if abs(pct - round(pct)) > 0.05:
        return rf"${pct:.1f}\%$"
    return rf"${pct:.0f}\%$"
def is_row_and_col_balanced(T1, T2):
"""
Partial latin squares T1 and T2 are balanced if the symbols
appearing in row r of T1 are the same as the symbols appearing in
row r of T2, for each r, and if the same condition holds on
columns.
EXAMPLES::
sage: from sage.combinat.matrices.latin... | f0a9d1522da2fc079d4021603198e79c438de727 | 9,490 |
def submit(ds, entry_name, molecule, index):
"""
Submit an optimization job to a QCArchive server.
Parameters
----------
ds : qcportal.collections.OptimizationDataset
The QCArchive OptimizationDataset object that this calculation
belongs to
entry_name : str
The base entr... | 50a30a25af59906ce5636ce8a176e29befd27d60 | 9,491 |
def list_isos(apiclient, **kwargs):
    """Lists all available ISO files.

    :param apiclient: API client used to issue the listIsos request
    :param kwargs: filter attributes copied onto the listIsos command
    :return: the result of ``apiclient.listIsos``
    """
    cmd = listIsos.listIsosCmd()
    # Copy filters onto the command object with a plain loop (the old
    # code abused a list comprehension purely for its side effects).
    for key, value in kwargs.items():
        setattr(cmd, key, value)
    return apiclient.listIsos(cmd)
from .....main import _get_bot
from typing import Optional
from typing import Union
async def edit_message_live_location(
token: str = TOKEN_VALIDATION,
latitude: float = Query(..., description='Latitude of new location'),
longitude: float = Query(..., description='Longitude of new location'),
chat_id... | 39eef452e570e4b00b08aa66aba6d4253bce154f | 9,493 |
def process_rollout(rollout, gamma, lambda_=1.0):
"""
given a rollout, compute its returns and the advantage
"""
batch_si = np.asarray(rollout.states)
batch_a = np.asarray(rollout.actions)
rewards = np.asarray(rollout.rewards)
action_reward = np.concatenate((batch_a,rewards[:,np.newaxis]), a... | da37f8b55294df5204f18772552e72d2131dd072 | 9,494 |
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add sensors for passed config_entry in HA."""
coordinator: IotawattUpdater = hass.data[DOMAIN][config_entry.entry_id]
created = set()
@callback
def _create_entity(key: str) -> IotaWattSensor:
"""Create a sensor entity."... | 171d65acf5227ed9027481bcc2eb773bee52bbca | 9,495 |
from datetime import datetime
import calendar
def calculate_cost(cost, working_days_flag, month, nr_of_passes):
"""Calculate the monthly tolls cost"""
if working_days_flag:
passes = working_days(month) * nr_of_passes
else:
now = datetime.datetime.now()
passes = calendar.monthrange(... | 5221e0dedd56d7d3302aa88cdf9ad7feb67173a3 | 9,496 |
def e_dl() -> str:
"""Fetch size of archives to be downloaded for next system update."""
size = 'Calculating...'
with open(file=TMERGE_LOGFILE, mode='r', encoding='utf-8') as log_file:
for line in list(log_file)[::-1]:
reqex = search(r'(Size of downloads:.)([0-9,]*\s[KMG]iB)', line)
... | 1639d6cd0e78ca4f4adfceb75875f6b0de398a63 | 9,497 |
def get_model_fn():
"""Returns the model definition."""
def model_fn(features, labels, mode, params):
"""Returns the model function."""
feature = features['feature']
print(feature)
labels = labels['label']
one_hot_labels = model_utils.get_label(
labels,
params,
FLAGS.src... | ef006ff79c6979a61a745ebfecd599858ded0418 | 9,498 |
def build_node(idx, node_type):
    """ Build node list
    :idx: a value to id mapping dict
    :node_type: a string describe the node type
    :returns: a list of records of the nodes extracted from the mapping
    """
    label_props = {':LABEL': node_type}
    return rekey(idx, 'value', 'id:ID', label_props)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.