content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def weight_variable_glorot(input_dim, output_dim, name=""):
"""Create a weight variable with Glorot & Bengio (AISTATS 2010)
initialization.
"""
init_range = np.sqrt(6.0 / (input_dim + output_dim))
initial = tf.random_uniform([input_dim, output_dim], minval=-init_range,
... | 14,800 |
def pareto_plot(column: pd.Series,
use_given_index: bool = False,
figsize: Tuple[int, int] = (12, 8),
return_freq_df: bool = False):
"""
Draw Pareto plot for categorical variable
Arguments:
----------
column: pd.Series
Categorical input
fi... | 14,801 |
def get_openshift_installer(
version=None,
bin_dir=None,
force_download=False,
):
"""
Download the OpenShift installer binary, if not already present.
Update env. PATH and get path of the openshift installer binary.
Args:
version (str): Version of the installer to download
b... | 14,802 |
async def activate_prompt_toolkit_async_mode() -> None:
"""Configure prompt toolkit to use the asyncio event loop.
Needs to be async, so we use the right event loop in py 3.5"""
global ACTIVATED_ASYNC_MODE
if not is_prompt_toolkit_3():
# Tell prompt_toolkit to use asyncio for the event loop.
... | 14,803 |
def parse_arguments():
"""
Parse input arguments and store them in a global variable.
Returns:
Parsed arguments.
"""
parser = argparse.ArgumentParser(description='Generates a lexicon for gender recognition.')
parser.add_argument('dataset', help='file with JSON objects to be processed')
... | 14,804 |
def remove_external_id(
role_name: str,
dir_path: Optional[str],
session=None,
client=None,
backup_policy: Optional[str] = "",
bucket: Optional[str] = None,
) -> Dict:
"""The remove_external_id method takes a role_name as a string
to allow the removal of an externalId condition.
... | 14,805 |
def find_binaries(fw_path):
"""
Gets a list of possible binaries within a firmare sample.
The list might contain false positives, angr will ignore them.
:param fw_path: firmware path
:return: a list of binaries
"""
cmd = "find \""+ fw_path + "\""
cmd += " -executable -type f... | 14,806 |
def get_core_count():
    """
    Find out how many CPU cores this system has.

    Returns:
        str: The core count as a string, falling back to "1" when the
        platform cannot report it (raises NotImplementedError or
        returns None).
    """
    try:
        cores = compat.enum_cpus()  # available on Python 3.4 and up
    except NotImplementedError:
        return "1"  # Python 3.2-3.3
    # Some platforms return None instead of raising; call only once.
    if cores is None:
        return "1"
    return str(cores)
def test_artefact_update() -> None:
    """Updating a const artefact must raise TypeError."""
    database = Redis()
    storage = RedisStorage(database)
    artefact = _graph.constant_artefact(database, storage, b"bla bla")
    with pytest.raises(TypeError):
        _graph.set_data(
            database, storage, artefact.hash, b"b", _graph.ArtefactStatus.done
        )
def ASTTailrec(func):
"""
This approach involves modifying the ast tree so we can just stick a decorator on such as
```
@ASTTailrec
def fac(n, k=1):
if n == 1: return k
return fac(n-1, k*n)
```
This function has been heavily inspired by Robin Hillard's pipeop library at
... | 14,809 |
def vec2str(vec, n_chars=4, charset_size=43):
    """Decode a one-hot encoded captcha vector into its string.

    Args:
        vec: sequence of 0/1 values of length n_chars * charset_size;
            each consecutive slice of charset_size entries one-hot
            encodes a single character.
        n_chars (int): number of characters encoded in the vector
            (defaults to 4, the original fixed captcha length).
        charset_size (int): size of the per-character alphabet
            (defaults to 43, the original fixed value).

    Returns:
        str: decoded string; each character is chr(index + ord('0')).
    """
    chars = []
    for i in range(n_chars):
        block = vec[i * charset_size:(i + 1) * charset_size]
        index = np.argwhere(block == 1)[0][0]
        chars.append(chr(index + ord('0')))
    return "".join(chars)
def mnist(path=None, batchsize=20, xpreptrain=None, ypreptrain=None, dataset="train", **kwargs):
"""
Legacy MNIST loader.
:type path: str
:param path: Path to MNIST pickle file.
:type batchsize: int
:param batchsize: Batch size (no shit sherlock)
:type xpreptrain: prepkit.preptrain
:p... | 14,811 |
def aseta_hiiri_kasittelija(kasittelija):
"""
Asettaa funktion, jota käytetään hiiren klikkausten käsittelyyn.
Käsittelijää kutsutaan aina, kun hiiren nappi painetaan alas missä tahansa
peli-ikkunan sisällä. Käsittelijän tulee olla funktio, jolla on tasan neljä
parametria: x, y, nappi sekä muokkaus... | 14,812 |
def rand_pad(ctvol):
"""Introduce random padding between 0 and 15 pixels on each of the 6 sides
of the <ctvol>"""
randpad = np.random.randint(low=0,high=15,size=(6))
ctvol = np.pad(ctvol, pad_width = ((randpad[0],randpad[1]), (randpad[2],randpad[3]), (randpad[4], randpad[5])),
m... | 14,813 |
def images(stack_ref, region, output, field, hide_older_than, show_instances):
"""Show all used AMIs and available Taupage AMIs"""
stack_refs = get_stack_refs(stack_ref)
region = get_region(region)
check_credentials(region)
ec2 = boto3.resource("ec2", region)
instances_by_image = collections.... | 14,814 |
def store_event(event: AddProcessStatus) -> None:
"""Store an :class:`.AddProcessStatus` event."""
try:
db.session.add(ProcessStatusEvent(
created=event.created,
event_id=event.event_id,
submission_id=event.submission_id,
process_id=event.process_id,
... | 14,815 |
def test_nbconvert(container, test_file, output_format):
"""Check if nbconvert is able to convert a notebook file"""
host_data_dir = os.path.join(THIS_DIR, "data")
cont_data_dir = "/home/jovyan/data"
output_dir = "/tmp"
LOGGER.info(
f"Test that the example notebook {test_file} can be convert... | 14,816 |
def _improve(tour: np.ndarray, matrix: np.ndarray, neighbours: np.ndarray, dlb: np.ndarray,
it1: int, t1: int, solutions: set, k: int) -> Tuple[float, np.ndarray]:
""" Последовательный 2-opt для эвристики Лина-Кернига
tour: список городов
matrix: матрица весов
neighbours: набор кандидатов
... | 14,817 |
def plot_step_with_errorbar(lefts, widths, y_coords, y_errs,
errors_enabled=True, use_errorrects_for_legend=False, **kwargs):
"""Makes a step plot with error bars."""
lefts.append(lefts[-1] + widths[-1])
y_coords.append(y_coords[-1])
# prevent that we have labels for the step... | 14,818 |
def giveKarma(bot, trigger):
"""Increases/decreases a user's karma - no spaces allowed"""
nick = trigger.group(1)
nickdb = nick.lower()
change = [0,1][trigger.group(2) == '++']
# command user matches karma target - or multiple karma in single line
if (nickdb == trigger.nick.lower()) or trig... | 14,819 |
def intensity_modification(x):
    """Randomly shift image intensity by a scalar offset in [-0.05, 0.05).

    Parameters
        x: Tensor
    Returns
        x: Tensor with the random offset added
    """
    offset = tf.random.uniform(
        shape=[], minval=-0.05, maxval=0.05, dtype=tf.dtypes.float32)
    return x + offset
def show_learning_curve(
est: BaseEstimator,
conf_mat_labels: List,
X_train: DataFrame,
y_train: Series,
X_test: DataFrame,
y_test: Series,
scoring_metric: str = "f1_micro",
cv: StratifiedKFold = StratifiedKFold(n_splits=12),
sizes: np.linspace = np.linspace(0.3, 1.0, 10),
fig_si... | 14,821 |
def test_pype_get_arguments_group_str_interpolate():
"""Parse group as interpolated str input from context."""
context = Context({
'group': 'gr',
'pype': {
'name': 'pipe name',
'groups': '{group}',
}
})
with get_arb_pipeline_scope(context):
(pipel... | 14,822 |
def FormIdProperty(expression, **kwargs):
"""
Create a StringProperty that references a form ID. This is necessary because
form IDs change when apps are copied so we need to make sure we update
any references to the them.
:param expression: jsonpath expression that can be used to find the field
... | 14,823 |
def remember_subreddit(name=None):
"""Add current subreddit to history."""
if name:
last = wf.cached_data('--last', max_age=0, session=True) or {}
sr = last.get(name)
if not sr: # must be a multi
sr = dict(name=name, title=name, type="public",
url=subre... | 14,824 |
def gen_lang(lang, queue):
"""generate data for a language"""
try:
start_time = time.time()
print("Lang: %s: generating..." % lang)
# list(str)
all_words = init_words(lang)
output_filename = "output/%s.csv" % lang
make_file_dirs(output_filename)
with open... | 14,825 |
def do_roll(dice: int, sides: int, _: int):
"""Given an amount of dice and the number of sides per die, simulate a dice roll and return
a list of ints representing the outcome values.
Modifier is ignored.
"""
dice = dice or 1
sides = sides or 1
values = sorted(((secrets.randbelow(sides) + 1... | 14,826 |
def compute_logp_independent_block(X, alpha=None):
"""Compute the analytical log likelihood of a matrix under the
assumption of independence.
"""
if alpha is None: alpha = np.ones(X.shape[1])
logp_ib = gammaln(alpha.sum()) - (gammaln(alpha)).sum()
logp_ib += gammaln(X.sum(0) + alpha).sum() - gam... | 14,827 |
def check_validity_label(labels):
"""
Check to see whether it makes a valid tuple
Parameters:
-----------
labels: A tuple of labels (Object_1, Object_2, Object_3,
Return:
-------
"""
# Event is None -> All other values are None
if labels[3] == 0:
for... | 14,828 |
def flat_proj(v1, v2):
    """Return the flat projection of direction unit vector v1 onto v2."""
    normal = np.cross(v1, v2)        # normal of the plane spanned by v1 and v2
    in_plane = np.cross(normal, v1)  # direction in the plane, orthogonal to v1
    return proj(in_plane, v2)
def marching_cubes_naive(
volume_data_batch: torch.Tensor,
isolevel: Optional[float] = None,
spacing: int = 1,
return_local_coords: bool = True,
) -> Tuple[List[torch.Tensor], List[torch.Tensor]]:
"""
Runs the classic marching cubes algorithm, iterating over
the coordinates of the volume_dat... | 14,830 |
def get_reshaped_ann_input(begin_state, new_state, action, pieces_player_begin, dice):
""" save STATE and ACTION into 1-dimensional np.array. This should be an input to a ANN """
# look for the position of the given pawn before and after a move
current_player = 0
input_ann = np.array(begin_state)
in... | 14,831 |
def _test_series(case, site, url, expected):
"""
Helper to use in site-specific test cases. See test_kissmanga.py for usage.
"""
resp = site.get_manga_seed_page(url)
if resp.status_code != 200:
raise Exception('Failed to download series html')
html = resp.text
series = site.series_in... | 14,832 |
def gc_resnet101(num_classes):
    """Construct a ResNet-101 model built from GC (Global Context) bottlenecks.

    Args:
        num_classes (int): Number of output classes for the classifier head.

    Returns:
        ResNet: Model with the standard [3, 4, 23, 3] layer layout and an
        adaptive average-pooling layer replacing the fixed-size pool.
    """
    model = ResNet(GCBottleneck, [3, 4, 23, 3], num_classes=num_classes)
    # Adaptive pooling lets the network accept variable input resolutions.
    model.avgpool = nn.AdaptiveAvgPool2d(1)
    return model
def _write_data(x, y, sett, jtv=None):
""" Format algorithm output.
Args:
jtv (torch.tensor, optional): Joint-total variation image, defaults to None.
Returns:
dat_y (torch.tensor): Reconstructed image data, (dim_y, C).
pth_y ([str, ...]): Paths to reconstructed images.
lab... | 14,834 |
def compute_seatable_votes(votes, votetypes):
"""Compute the seatable votes.
Parameters
----------
votes: pandas.DataFrame
the votes of the seatable votes.
votetypes: dict
the information of the different types of vote variables.
Returns
-------
seatable_votes: numpy.nd... | 14,835 |
async def read_cookie(refresh_token: Optional[str] = Cookie(None)) -> JSONResponse:
"""Reads a cookie.
Args:
refresh_token: Name of the cookie.
Returns:
JSONResponse:
Returns the value of the cookie as a json blurb.
"""
if refresh_token:
return JSONResponse(
... | 14,836 |
def test_sort_values_simple_no_order():
""" It should sort dataframe """
data = pd.DataFrame(
[
{'variable': 'toto', 'Category': 2, 'value': 300},
{'variable': 'toto', 'Category': 3, 'value': 100},
{'variable': 'toto', 'Category': 4, 'value': 250},
{'varia... | 14,837 |
def get_role_output(role_id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetRoleResult]:
"""
Use this data source to access information about an existing resource.
"""
... | 14,838 |
def readAirfoilFile(fileName, bluntTe=False, bluntTaperRange=0.1, bluntThickness=0.002):
"""Load the airfoil file"""
f = open(fileName)
line = f.readline() # Read (and ignore) the first line
r = []
try:
r.append([float(s) for s in line.split()])
except Exception:
pass
while... | 14,839 |
def batchRenderBegin(info, userData, *args, **kwargs):
"""
Hook called before a render begins. The render will be blocked
until this function returns.
:param info: Empty dictionary for now. Might have parameters in the future.
:param userData: Object that will be carried over into the render end ... | 14,840 |
def get_args():
"""
Return the args from the arg parser.
:return: args (arg parser object).
"""
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-d',
dest='debug',
action='store_true',
default=F... | 14,841 |
def run(
input_file=parser.get_default("input_file"),
event_level=parser.get_default("event_level"),
):
"""
The inner loop for sending syslog lines as events and breadcrumbs to Sentry.
Expects the Sentry Python logging integration to be initialized before being
called.
"""
for syslog_li... | 14,842 |
def depListToArtifactList(depList):
"""Convert the maven GAV to a URL relative path"""
regexComment = re.compile('#.*$')
#regexLog = re.compile('^\[\w*\]')
artifactList = []
for nextLine in depList:
nextLine = regexComment.sub('', nextLine)
nextLine = nextLine.strip()
gav = m... | 14,843 |
def configure_plugins_plugin_install_to_version(request, pk, version):
"""
View rendering for the install to version modal interface
:param request: Request
:param pk: The primary key for the plugin
:param version: The version to install
:return: a renderer
"""
plugin = get_object_or_40... | 14,844 |
def save_model(model, model_filepath):
    """Store the model in a pickle file.

    Args:
        model: Any picklable object (e.g. a fitted estimator).
        model_filepath (str): Destination path for the pickle file.
    """
    with open(model_filepath, 'wb') as file:
        pickle.dump(model, file)
def test_certificates(host, site):
"""Validate that the letsencrypt certificates are set up """
assert host.file(f"/etc/letsencrypt/live/{site}/fullchain.pem").exists
assert host.file(f"/etc/letsencrypt/live/{site}/privkey.pem").exists
site_conf = host.file(f"/etc/nginx/sites-available/{site}")
ass... | 14,846 |
def _parameters_to_vector(parameters):
"""
This fix is required for pytorch >= 1.6.0, due to the change
in memory format promotion rule.
For more info, check:
* https://github.com/pytorch/pytorch/pull/37968
* https://github.com/pytorch/pytorch/releases/tag/v1.6.0
and search "Note: BC-break... | 14,847 |
def create_app(config, enable_config_file=False):
"""
创建应用
:param config: 配置信息对象
:param enable_config_file: 是否允许运行环境中的配置文件覆盖已加载的配置信息
:return: 应用
"""
app = create_flask_app(config, enable_config_file)
# 创建Snowflake ID worker
from utils.snowflake.id_worker import IdWorker
app.id_w... | 14,848 |
def select_variables(expr):
"""When called on an expression, will yield selectors to the variable.
A selector will either return the variable (or equivalent fragment) in
an expression, or will return an entirely new expression with the
fragment replaced with the value of `swap`.
e.g.
>>> from ... | 14,849 |
def get_login(name_p: str, pass_p: str, auth_error: bytes = b'') -> Callable:
"""Decorator to ensure a player's login information is correct."""
# NOTE: this function does NOT verify whether the arguments have
# been passed into the connection, and assumes you have already
# called the appropriate decor... | 14,850 |
def map_visualize(df: gpd.GeoDataFrame,
lyrs='s',
scale=0.5,
figsize = (12,9),
color = "red",
ax = None,
fig=None,
*args, **kwargs):
"""Draw the geodataframe with the satellite image ... | 14,851 |
def unorm_to_byte(x):
    """Map float x in [0, 1] to an integer in [0, 255].

    Out-of-range inputs are clamped so the result is always a valid
    byte (the original returned negative values for x < 0).
    """
    return max(0, min(int(256 * x), 255))
def eigh(a, largest: bool = False):
"""
Get eigenvalues / eigenvectors of hermitian matrix a.
Args:
a: square hermitian float matrix
largest: if True, return order is based on descending eigenvalues, otherwise
ascending.
Returns:
w: [m] eigenvalues
v: [m, m]... | 14,853 |
def test_param_reorder():
"""Parameters can be reordered and doesn't affect the outcome."""
standard = pytest.mark.parametrize(
argnames="foo,bar,baz",
argvalues=[
(3, "something", 777),
(None, -100, "aaaaa"),
([10, 20, 30], ..., 0),
],
)
wrapp... | 14,854 |
def lowpass(x, dt, fc, order=5):
"""
Low pass filter data signal x at cut off frequency fc, blocking harmonic content above fc.
Parameters
----------
x : array_like
Signal
dt : float
Signal sampling rate (s)
fc : float
Cut off frequency (Hz)
order : int, opti... | 14,855 |
def doRunFixPlanets(msName):
"""Generate code for running fixplanets on fields with (0,0) coordinates"""
print('\n*** doRunFixPlanets ***')
fieldIds = sfsdr.getFieldsForFixPlanets(msName)
if len(fieldIds) != 0:
casaCmd = ''
mytb = aU.createCasaTool(tbtool)
mytb.open(msName+'/... | 14,856 |
def add_name_suffix(
suffix, obj_names=None, filter_type=None, add_underscore=False, search_hierarchy=False,
selection_only=True, **kwargs):
"""
Add prefix to node name
:param suffix: str, string to add to the end of the current node
:param obj_names: str or list(str), name of list of no... | 14,857 |
def checksum(hdpgroup: list,
algorithm: str = 'CRC32',
chktag: str = '\'α') -> list:
"""List of checksums-like for detection of Non-intentional data corruption
See https://en.wikipedia.org/wiki/Cksum
See https://en.wikipedia.org/wiki/Checksum
Args:
hdpgroup (list): li... | 14,858 |
def run_job(runner, runner_thread, queue, function, input):
    """Start a new job.

    Wires the job function, its input and the communication queue onto
    the runner object, then kicks off the worker thread.
    """
    runner.comm_queue = queue
    runner.job_input = input
    runner.job_function = function
    runner_thread.start()
def install():
    """Install Storyboard Pro specific functionality of avalon-core.

    Called automatically by `api.install(storyboardpro)`; registers this
    application as a pyblish host.
    """
    print("Installing Avalon Storyboard Pro...")
    pyblish.api.register_host("storyboardpro")
def dsum(i0,i1,step = 1, box=[]):
""" for a range of fits files
compute the mean and dispersion from the mean
"""
for i in range(i0,i1+1,step):
ff = 'IMG%05d.FIT' % i
h1, d1 = getData(ff,box)
#very specific for 16 bit data, since we want to keep the data in uint16
bze... | 14,861 |
def test_sync_buckets(db):
"""
Test that bucket syncing only pulls study buckets
"""
client = boto3.client("s3")
bucket1 = client.create_bucket(Bucket="not-a-study")
bucket2 = client.create_bucket(Bucket="kf-dev-sd-00000000")
assert Bucket.objects.count() == 0
sync_buckets()
assert... | 14,862 |
def _env_translate_obs(obs):
"""
This should only be used for the Tiger ENV.
Parameters
----------
obs : list or array-like
The observation to be translated.
Returns
-------
str
A representation of the observation in English.
"""
if obs[0] == 1:
return '... | 14,863 |
def voter(address):
    """Return voter credentials for the given address.

    Parameters:
        address: address
    Returns:
        list of three values: address (str), is_voter (bool), voted (bool).
    """
    voter_entry = contract.functions.voters(address)
    return voter_entry.call()
def E_disp_z(m, N, j_star=3.):
    """Vertical displacement as a function of vertical wavenumber."""
    numerator = E0 * b**3 * N0**2
    scaled_m = 1 + m / beta_star(N, j_star)
    denominator = 2 * j_star * np.pi * N**2 * scaled_m**2
    return numerator / denominator
def get_stereo_image():
    """Retrieve one stereo camera image.

    Returns:
        (mat): cv2 image, or None when no frame is available.
    """
    # core.get_stereo_image() already yields None when no image is
    # ready, so the `if img is not None ... else return None` branch
    # of the original was redundant.
    return core.get_stereo_image()
def f():
"""<caret>
class Class:
"""
bar
""" | 14,867 |
def request_set_arm_state(token: str, arm_state: str):
"""Request set arm state."""
headers = {
'Authorization': 'Bearer %s' % token,
'Content-Type': 'application/json'
}
payload = {
"Created": int(time.time()),
"AppVersion": APP_VERSION,
"AppType": APPTYPE,
... | 14,868 |
def get_all_config(filename=None):
"""
Set default configuration options for configparse
Config with defaults settings if no file will be passed
Also with defaults sections and defaults keys for missing options in config
:param filename: options config file to read
:return: configparser object w... | 14,869 |
def check_for_end_or_abort(e):
"""Return a closure checking for END or ABORT notifications
Arguments:
e -- event to signal when the action is completed
(will be set when an END or ABORT occurs)
"""
def check(notification, e = e):
print("EVENT : " + \
Base_pb2.ActionEve... | 14,870 |
def check_vacancy_at_cell(house_map, cell):
"""
Return True if the given cell is vacant.
Vacancy is defined as a '0' in the house map at the given coordinates.
(i.e. there is no wall at that location)
"""
x = cell[0]
y = cell[1]
if not 0 <= x < MAP_WIDTH:
return False
... | 14,871 |
def extract_text_and_vertices(x: Dict[str, str]):
"""Extracts all annotations and bounding box vertices from a single OCR
output from Google Cloud Vision API.
The first element is the full OCR. It's equivalent to the output of
`extract_full_text_annotation` for the same OCR output.
Args:
... | 14,872 |
def _set(name, par, val):
    """Set a source parameter via the Sherpa UI.

    Args:
        name (str): Source (model component) name.
        par (str): Parameter name on that source.
        val: Value to assign.
    """
    # Imported locally so the module can load without Sherpa installed.
    import sherpa.astro.ui as sau
    sau.set_par(f'{name}.{par}', val)
def test_grid_queue():
"""Test GridQueue"""
queue = GridQueue(40, 2, "B")
push1 = queue.push()
push2 = queue.push()
with pytest.raises(GridSiteException):
queue.push()
pop1 = queue.pop()
pop2 = queue.pop()
with pytest.raises(GridSiteException):
queue.pop()
assert push... | 14,874 |
def cspace3(obs, bot, theta_steps):
"""
Compute the 3D (x, y, yaw) configuration space obstacle for a lit of convex 2D obstacles given by [obs] and a convex 2D robot given by vertices in [bot] at a variety of theta values.
obs should be a 3D array of size (2, vertices_per_obstacle, num_obstacles)
bot ... | 14,875 |
def Graph(backend:Optional[str]=None) -> BaseGraph:
"""Returns an instance of an implementation of :class:`~pyzx.graph.base.BaseGraph`.
By default :class:`~pyzx.graph.graph_s.GraphS` is used.
Currently ``backend`` is allowed to be `simple` (for the default),
or 'graph_tool' and 'igraph'.
This method is the prefe... | 14,876 |
def describe_stack_events(StackName=None, NextToken=None):
"""
Returns all stack related events for a specified stack in reverse chronological order. For more information about a stack's event history, go to Stacks in the AWS CloudFormation User Guide.
See also: AWS API Documentation
:example:... | 14,877 |
def set_param(component, param, value):
    """Assign a value to a parameter on a component.

    Args:
        component (`BondGraphBase`): The particular component.
        param: The parameter to set.
        value: The value to assign it to; may be None.
    """
    component.set_param(param, value)
def get_carb_data(data, offset=0):
""" Load carb information from an issue report cached_carbs dictionary
Arguments:
data -- dictionary containing cached carb information
offset -- the offset from UTC in seconds
Output:
3 lists in (carb_values, carb_start_dates, carb_absorption_times)
form... | 14,879 |
def _discover_bounds(cdf, tol=1e-7):
"""
Uses scipy's general continuous distribution methods
which compute the ppf from the cdf, then use the ppf
to find the lower and upper limits of the distribution.
"""
class DistFromCDF(stats.distributions.rv_continuous):
def cdf(self, x):
... | 14,880 |
def ml_app_instances_ml_app_instance_id_get(ml_app_instance_id):  # noqa: E501
    """ml_app_instances_ml_app_instance_id_get

    Stub handler for the MLApp-instance GET endpoint.  # noqa: E501

    :param ml_app_instance_id: MLApp instance identifier
    :type ml_app_instance_id: str

    :rtype: None
    """
    placeholder_response = 'do some magic!'
    return placeholder_response
def transcribe_file_with_word_time_offsets(speech_file,output_path):
"""Transcribe the given audio file synchronously and output the word time
offsets."""
from google.cloud import speech
from google.cloud.speech import enums
from google.cloud.speech import types
client = speech.SpeechClient()
... | 14,882 |
def plot_distribution2D(results,n_qubits,savepath,title = None, clear_fig = True):
"""plots diffusion for 2D data"""
plt.rcParams.update({'figure.figsize': (10,10)})
y,x,probability_density = results["dimension_0"],results["dimension_1"],results["probability_density"]
axes_limit = (2**n_qubits)-1
if... | 14,883 |
def generate_name(style: str = 'underscore', seed: int = None) -> str:
    """Generate a random name, optionally seeding the RNG for reproducibility."""
    if seed is not None:
        random.seed(seed)
    names = random_names()
    return format_names(names, style=style)
def argunique(a, b):
"""
找出a--b对应体中的唯一对应体,即保证最终输出的aa--bb没有重复元素,也没有多重对应
:param a:
:param b:
:return: aaa, bbb 使得aaa-bbb是唯一对
"""
# 先对a中元素进行逐个检查,如果第一次出现,那么添加到aa中,如果不是第一次,那么检查是否一致,不一致则设置成-1
# 设置成-1,代表a中当前元素i有过一对多纪录,剔除。同时-1也不会被再匹配到
seta = {}
for i, j in zip(a, b):
if i not in ... | 14,885 |
def create_config_flow(hass: core.HomeAssistant, host: str) -> None:
    """Start a config flow."""
    flow_init = hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={"host": host},
    )
    hass.async_create_task(flow_init)
def move(obj, direction):
"""
Moves object by (dx, dy).
Returns true if move succeeded.
"""
goal = obj.pos + direction
if (goal.x < 0 or goal.y < 0 or
goal.x >= obj.current_map.width or
goal.y >= obj.current_map.height):
# try_ catches this for the player, but nee... | 14,887 |
def addMedicine(medicine: object):
    """Create a medicine record via the API.

    Data required are "name", "description", "price", "quantity", "medicalId".
    """
    endpoint = mr.API + "/medicine/"
    return mr.makePostRequest(endpoint, medicine)
def get_train():
    """Training data generator.

    Yields:
        tuple: (first "ims" frame, first "migrants" slice) for each
        file in train_files.
    """
    for file in train_files:
        print("Train File: ", file)
        # Open each dataset once and close it afterwards; the original
        # opened the same file twice and never closed either handle.
        with nc.Dataset(file, "r") as ds:
            ims = np.array(ds["ims"][0:1])[0]
            migrants = np.array(ds["migrants"][0:1])
        yield (ims, migrants)
def begin(command, project, ename, group):
"""
Begin a run in the database log.
Args:
command: The command that will be executed.
pname: The project name we belong to.
ename: The experiment name we belong to.
group: The run group we belong to.
Returns:
(run, ses... | 14,890 |
def test_ultrasound_distance() -> None:
"""Test that we can read an ultrasound distance."""
backend = SBArduinoConsoleBackend(
"TestBoard",
console_class=MockConsole,
)
backend._console.next_input = "1.23" # type: ignore
metres = backend.get_ultrasound_distance(3, 4)
assert met... | 14,891 |
def fortran_library_item(lib_name,
sources,
**attrs
): #obsolete feature
""" Helper function for creating fortran_libraries items. """
build_info = {'sources':sources}
known_attrs = ['module_files','module_dirs',
... | 14,892 |
def drawPolygon(t,r,n):
"""
draw a polygon of n sides, centered at (0,0)
r is radius of the circle that would circumscribe the polygon
leave turtle at position (0,0) facing right
"""
# pick up the pen, move to the starting point, and put down the pen
t.up(); t.goto(r,0); t.down()
#... | 14,893 |
def copy_generator(generator):
"""Copy an existing numpy (random number) generator.
Parameters
----------
generator : numpy.random.Generator or numpy.random.RandomState
The generator to copy.
Returns
-------
numpy.random.Generator or numpy.random.RandomState
In numpy <=1.16... | 14,894 |
def uptime():
"""Returns a datetime.timedelta instance representing the uptime in a Windows 2000/NT/XP machine"""
import os, sys
import subprocess
if not sys.platform.startswith('win'):
raise RuntimeError, "This function is to be used in windows only"
cmd = "net statistics server"
p = su... | 14,895 |
def sort_from_avro(df: 'pd.DataFrame', cur_filename: str, order_folder: str) -> 'pd.DataFrame':
"""Shuffle a dataframe with the given seed
:param df: the input dataframe
:type df: pandas.DataFrame
:param cur_filename: the initial file name
:type cur_filename: str
:param order_folder: the order_... | 14,896 |
def test_pages_kingdom_successful(args, protein_gen_success, cazy_home_url, monkeypatch):
"""Test parse_family_by_kingdom() when all is successful."""
test_fam = Family("famName", "CAZyClass", "http://www.cazy.org/GH14.html")
def mock_get_pag(*args, **kwargs):
return ["http://www.cazy.org/GH14_all... | 14,897 |
def convert_yolo(
df: pd.DataFrame,
root: Union[str, os.PathLike, PosixPath],
copy_images: bool = False,
save_under: Optional[str] = None,
output_dir: Optional[Union[str, os.PathLike, PosixPath]] = None,
):
"""converts to yolo from master dataframe
Args:
df (pd.DataFrame): the maste... | 14,898 |
def _truncate_and_pad_token_ids(token_ids, max_length):
"""Truncates or pads the token id list to max length."""
token_ids = token_ids[:max_length]
padding_size = max_length - len(token_ids)
if padding_size > 0:
token_ids += [0] * padding_size
return token_ids | 14,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.