content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def test(test_batches: torch.utils.data.DataLoader,
model: nn.Module,
criterion: nn.CrossEntropyLoss) -> None:
"""Test the model
Args:
test_batches: batch loader of test images
model: the network to test
criterion: calculator for the loss
"""
test_loss = 0.
corr... | 15,000 |
def color_parser(color: str, color_dicts: list = None) -> tuple:
"""
convert a string with RGB/matplotlib named colors to matplotlib HSV tuples.
supports RGB colors with ranges between 0-1 or 0-255.
supported matplotlib colors can be found here:
https://matplotlib.org/3.3.1/gallery/color/named_col... | 15,001 |
def _build_auth_record(response):
"""Build an AuthenticationRecord from the result of an MSAL ClientApplication token request"""
try:
id_token = response["id_token_claims"]
if "client_info" in response:
client_info = json.loads(_decode_client_info(response["client_info"]))
... | 15,002 |
def main(selection="user", headless=False, short_exec=False):
"""
Replays a batch of demos using action primitives
Creates threads for a batch of demos to be replayed in parallel
Uses the code in replay_demo_with_action_primitives.py
"""
print("*" * 80 + "\nDescription:" + main.__doc__ + "\n" +... | 15,003 |
def loc_data_idx(loc_idx):
"""
Return tuple of slices containing the unflipped idx corresponding to loc_idx.
By 'unflipped' we mean that if a slice has a negative step, we wish to retrieve
the corresponding indices but not in reverse order.
Examples
--------
>>> loc_data_idx(slice(11, None,... | 15,004 |
def eat_descriptor(descr):
"""
Read head of a field/method descriptor. Returns a pair of strings, where
the first one is a human-readable string representation of the first found
type, and the second one is the tail of the parameter.
"""
array_dim = 0
while descr[0] == '[':
array_di... | 15,005 |
def legendre(a, p):
    """Compute the Legendre symbol (a|p) via Euler's criterion.

    Returns 1 if a is a quadratic residue modulo the odd prime p,
    -1 if it is a non-residue, and 0 when p divides a.
    """
    euler = pow(a, (p - 1) // 2, p)
    if euler == p - 1:
        return -1
    return euler
def get_parser():
"""
Create a parser with some arguments used to configure the app.
Returns:
argparse.ArgumentParser:
"""
parser = argparse.ArgumentParser(description="configuration")
parser.add_argument(
"--upload-folder",
required=True,
metavar="path",
... | 15,007 |
def _interpolate(acq, coefficient, sat_sol_angles_fname, coefficients_fname,
ancillary_fname, out_fname,
compression=H5CompressionFilter.LZF, filter_opts=None,
method=Method.SHEARB):
"""
A private wrapper for dealing with the internal custom workings of the
... | 15,008 |
def precheck_arguments(args):
""" Make sure the argument choices are valid """
any_filelist = (len(args.filelist_name[0]) > 0 or len(args.output_dir[0]) > 0 or args.num_genomes[0] > 0)
if len(args.filelist_name[0]) > 0 and len(args.output_dir[0]) == 0:
print("Error: Need to specify output directory with -O if... | 15,009 |
def get_image_blob(im):
"""Converts an image into a network input.
Arguments:
im (ndarray): a color image
Returns:
blob (ndarray): a data blob holding an image pyramid
im_scale_factors (list): list of image scales (relative to im) used
in the image pyramid
"""
im_... | 15,010 |
def main():
"""Start the bot."""
sp = get_spotify_client()
user_id = os.environ.get('SPOTIFY_USER_ID')
playlist_id = os.environ.get('SPOTIFY_PLAYLIST_ID')
playlister = PlaylistMaker(sp, user_id, playlist_id)
TOKEN = os.environ.get('TELEGRAM_BOT_TOKEN')
PORT = int(os.environ.get('PORT', '84... | 15,011 |
def get_expr_fields(self):
"""
get the Fields referenced by switch or list expression
"""
def get_expr_field_names(expr):
if expr.op is None:
if expr.lenfield_name is not None:
return [expr.lenfield_name]
else:
# constant value expr
... | 15,012 |
def add_bias_towards_void(transformer_class_logits, void_prior_prob=0.9):
"""Adds init bias towards the void (no object) class to the class logits.
We initialize the void class with a large probability, similar to Section 3.3
of the Focal Loss paper.
Reference:
Focal Loss for Dense Object Detection, ICCV ... | 15,013 |
def chkExists(path):
    """Raise IOError if the given file or directory does not exist."""
    if os.path.exists(path):
        return
    raise IOError("Directory or file %s does not exist" % path)
def validate(instance, schema, instance_cls, cls=None, *args, **kwargs):
"""This is a carbon-copy of :method:`jsonschema.validate` except that it
takes two validator classes instead of just one. In the jsonschema
implementation, `cls` is used to validate both the schema and the
instance. This changes th... | 15,015 |
def show_help( ):
""" displays the program parameter list and usage information """
stdout( "usage: " + sys.argv[0] + " -f <path>" )
stdout( " " )
stdout( " option description" )
stdout( " -h help (this text here)" )
stdout( " -f GO flat file to import [tab delimited]" )
stdout( " " )
s... | 15,016 |
def trajnet_batch_multi_eval(preds, gt, seq_start_end):
"""Calculate Top-k ADE, Top-k FDE for batch of samples.
pred = Num_modes x Num_ped x Num_timesteps x 2
gt = Num_ped x Num_timesteps x 2
seq_start_end (batch delimiter) = Num_batches x 2
"""
s_topk_ade = 0
s_topk_fde = 0
for (start,... | 15,017 |
def disemvowel(sentence):
"""Disemvowel:
Given a sentence, return the sentence with all vowels removed.
>>> disemvowel('the quick brown fox jumps over the lazy dog')
'th qck brwn fx jmps vr th lzy dg'
"""
vowels = ('a','e','i','o','u')
for x in sentence:
if x in vowels:
... | 15,018 |
def runPolyReg(xValueList, yValueList, degrees):
"""
Preforms *Polynomial Regression* based on the arguments provided.
Note that we split the data by the *First* 80 percent of the data and then the *Last* 20 percent of the data, rather than randomly splitting the data by 80/20 for the Train/Test split.
... | 15,019 |
def unfold_phi_vulpiani(phidp, kdp):
"""Alternative phase unfolding which completely relies on :math:`K_{DP}`.
This unfolding should be used in oder to iteratively reconstruct
:math:`Phi_{DP}` and :math:`K_{DP}` (see :cite:`Vulpiani2012`).
Parameters
----------
phidp : :class:`numpy:numpy.ndar... | 15,020 |
def test_is_layout_using_existing_script_no_scripts():
"""
Given
- layout which has no scripts.
- id_set.json
When
- is_layout_scripts_found is called with an id_set.json
Then
- Ensure that is_layout_scripts_found returns True.
"""
validator = IDSetValidations(i... | 15,021 |
def get_result_df(session):
"""
query the match table and put results into pandas dataframe,
to train the team-level model.
"""
df_past = pd.DataFrame(
np.array(
[
[s.fixture.date, s.fixture.home_team, s.fixture.away_team, s.home_score, s.away_score]
... | 15,022 |
def permute1d(preserve_symmetry = True):
"""Choose order to rearrange rows or columns of puzzle."""
bp = block_permutation(preserve_symmetry)
ip = [block_permutation(False),block_permutation(preserve_symmetry)]
if preserve_symmetry:
ip.append([2-ip[0][2],2-ip[0][1],2-ip[0][0]])
else:
... | 15,023 |
def find_diff(sha, files=None):
"""Find the diff since the given sha."""
if files:
for file_or_dir in files:
msg = f"{file_or_dir} doesn't exist. Please provide a valid path."
assert os.path.exists(file_or_dir), msg
else:
files = ['*.py']
res = subprocess.run(
... | 15,024 |
def split_name_with_nii(filename):
"""
Returns the clean basename and extension of a file.
Means that this correctly manages the ".nii.gz" extensions.
:param filename: The filename to clean
:return: A tuple of the clean basename and the full extension
"""
base, ext = os.path.splitext(filenam... | 15,025 |
def take(n: int, iterable: Iterable[T_]) -> List[T_]:
    """Collect up to the first n items of the iterable into a list."""
    collected: List[T_] = []
    for item in islice(iterable, n):
        collected.append(item)
    return collected
def abs(rv):
    """Return the absolute value of a random variable.

    Note: intentionally shadows the builtin ``abs`` in this module;
    delegates to the object's own ``abs`` method.
    """
    absolute = rv.abs
    return absolute()
def compute_ssm(X, metric="cosine"):
"""Computes the self-similarity matrix of X."""
D = distance.pdist(X, metric=metric)
D = distance.squareform(D)
for i in range(D.shape[0]):
for j in range(D.shape[1]):
if np.isnan(D[i, j]):
D[i, j] = 0
D /= D.max()
return 1... | 15,028 |
def escape(instruction):
"""
Escape used dot graph characters in given instruction so they will be
displayed correctly.
"""
instruction = instruction.replace('<', r'\<')
instruction = instruction.replace('>', r'\>')
instruction = instruction.replace('|', r'\|')
instruction = instruction.... | 15,029 |
def chunks(blocks: Iterable, n: int = 16) -> Iterable:
    """
    Yield successive n-sized chunks from blocks.

    Despite the ``Iterable`` annotation, the input must support ``len()``
    and slicing (e.g. list, tuple, str, bytes); each chunk preserves the
    input's type and the final chunk may be shorter than n.

    :param blocks: sequence to split into chunks
    :param n: chunk size; must be a positive integer
    :return: generator of consecutive slices of ``blocks``
    :raises ValueError: if ``n`` is not positive (previously a negative n
        silently produced no chunks)
    """
    if n <= 0:
        raise ValueError("chunk size n must be positive")
    for start in range(0, len(blocks), n):
        yield blocks[start : start + n]
def authenticate(ws, service_account_file, audience):
    """Authenticate the WebSocket connection.

    Sends a ``Bearer <token>`` string as a message on the socket, where the
    token is a Google-signed ID token minted from the service-account
    credentials for the given audience.

    :param ws: open WebSocket-like object exposing ``send``.
    :param service_account_file: path to the service-account JSON key file.
    :param audience: audience claim for the ID token.
    """
    ws.send("Bearer {token}".format(token=google_id_token.get_id_token(service_account_file, audience)))
def _json_object_hook(d):
"""
JSON to object helper
:param d: data
:return: namedtuple
"""
keys = []
for k in d.keys():
if k[0].isdigit():
k = 'd_{}'.format(k)
keys.append(k)
return namedtuple('X', keys)(*d.values()) | 15,032 |
def _fourier_interpolate(x, y):
""" Simple linear interpolation for FFTs"""
xs = np.linspace(x[0], x[-1], len(x))
intp = interp1d(x, y, kind="linear", fill_value="extrapolate")
ys = intp(xs)
return xs, ys | 15,033 |
def compute_hash_json_digest(*args, **kwargs):
    """Compute the JSON hash of the given args/kwargs and return its md5 hex digest.

    Serialization is delegated to ``compute_hash_json`` (defined elsewhere in
    this module); its return value must be bytes suitable for hashing.
    NOTE: md5 is used here for fingerprinting/cache keys, not for security.
    """
    as_json = compute_hash_json(*args, **kwargs)
    return hashlib.md5(as_json).hexdigest()
def hello_world():
    """Return the JSON welcome message used as the response body."""
    body = '{"info": "Refer to internal http://metadata-db for more information"}'
    return body
def get_role(server: discord.Server, role_arg: str) -> discord.Role:
"""
Get a role from a passed command parameter (name, mention or ID).
:return:
"""
try:
role_id = extract_role_id(role_arg)
except discord.InvalidArgument: # no ID, treat as a role name
try:
role = ... | 15,036 |
def tf_cc_library(
name,
srcs = [],
hdrs = [],
deps = [],
tf_deps = [],
copts = [],
compatible_with = None,
testonly = 0,
alwayslink = 0):
""" A rule to build a TensorFlow library or OpKernel.
Just like cc_library, but:
* Adds always... | 15,037 |
def build_timestamp(timestamp=None) -> google.protobuf.timestamp_pb2.Timestamp:
"""Convert Python datetime to Protobuf Timestamp"""
# https://github.com/protocolbuffers/protobuf/issues/3986
proto_timestamp = google.protobuf.timestamp_pb2.Timestamp()
return proto_timestamp.FromDatetime(timestamp or datet... | 15,038 |
def set_workspace(data: Dict[str, Any]) -> Dict[str, Any]:
"""Set workspace."""
workspace_path = data.get("path", None)
if not workspace_path:
raise ClientErrorException("Parameter 'path' is missing in request.")
os.makedirs(workspace_path, exist_ok=True)
workdir = Workdir()
workdir.se... | 15,039 |
def read_blosum():
"""Read blosum dict and delete some keys and values."""
with open('./psiblast/blosum62.pkl', 'rb') as f:
blosum_dict = cPickle.load(f)
temp = blosum_dict.pop('*')
temp = blosum_dict.pop('B')
temp = blosum_dict.pop('Z')
temp = blosum_dict.pop('X')
temp = b... | 15,040 |
def parse_spans_bio_with_errors(seq: List[str]) -> Tuple[List[Span], List[Error]]:
"""Parse a sequence of BIO labels into a list of spans but return any violations of the encoding scheme.
Note:
In the case where labels violate the span encoded scheme, for example the
tag is a new type (like ``I... | 15,041 |
def find_best_similar_match(i1: int, i2: int, j1: int, j2: int, a: Sequence, b: Sequence, sm: SequenceMatcher = None) \
-> Tuple[int, int, float]:
"""
Finds most similar pair of elements in sequences bounded by indexes a[i1:i2], b[j1: j2].
:param i1: starting index in "a" sequence.
:param i2: e... | 15,042 |
def ConvUpscaleBlock(inputs, n_filters, kernel_size=[3, 3], scale=2):
"""
Basic conv transpose block for Encoder-Decoder upsampling
Apply successivly Transposed Convolution, BatchNormalization, ReLU nonlinearity
"""
net = slim.conv2d_transpose(inputs, n_filters, kernel_size=[3, 3], stride=[2, 2], ac... | 15,043 |
def create_ticket(
client, chat_id, user_id, group_id, recipient_email, subject,
slack_message_url
):
"""Create a new zendesk ticket in response to a new user question.
:param client: The Zendesk web client to use.
:param chat_id: The conversation ID on slack.
:param user_id: Who to create th... | 15,044 |
def make_random_password(self, length = 10, allowed_chars = 'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
"""
Generate a random password with the given length and given
allowed_chars. The default value of allowed_chars does not have "I" or
"O" or letters and digits that look similar -- just to avoid c... | 15,045 |
def autocov_vector(x, nlags=None):
"""
This method computes the following function
.. math::
R_{xx}(k) = E{ x(t)x^{*}(t-k) } = E{ x(t+k)x^{*}(k) }
k \in {0, 1, ..., nlags-1}
(* := conjugate transpose)
Note: this is related to
the other commonly used definition for vector autocovarian... | 15,046 |
def sequence_masking(x, mask, mode=0, axis=None, heads=1):
"""为序列条件mask的函数
mask: 形如(batch_size, sequence)的0-1矩阵;
mode: 如果是0,则直接乘以mask;
如果是1,则在padding部分减去一个大正数。
axis: 序列所在轴,默认为1;
heads: 相当于batch这一维要被重复的次数。
"""
if mask is None or mode not in [0, 1]:
return x
else:
... | 15,047 |
def declare_encoding(log, labelling, encoding, additional_columns, cols=None): #TODO JONAS
"""creates and returns the DataFrame encoded using the declare encoding
:param log:
:param labelling:
:param encoding:
:param additional_columns:
:param cols:
:return:
"""
filter_t = True
... | 15,048 |
def __long_description() -> str:
    """Return the project long description: the README followed by the changelog.

    Relies on the module-level ``__readme`` and ``__changelog`` helpers, whose
    outputs are joined by a blank line.
    """
    return f"{__readme()}\n\n{__changelog()}"
def captains_draft(path=None, config=None):
    """Similar to captains mode with a 27 heroes, only 3 bans per teams.

    :param path: optional path forwarded to ``_default_game``.
    :param config: optional configuration forwarded to ``_default_game``.
    :return: the game object with its mode set to Captains Draft (CD).
    """
    game = _default_game(path, config=config)
    # Game mode enums must be passed to the options struct as plain ints.
    game.options.game_mode = int(DOTA_GameMode.DOTA_GAMEMODE_CD)
    return game
def create(width, height, pattern=None):
"""Create an image optionally filled with the given pattern.
:note: You can make no assumptions about the return type; usually it will
be ImageData or CompressedImageData, but patterns are free to return
any subclass of AbstractImage.
:Parameters:
... | 15,051 |
def compute_conformer(smile: str, max_iter: int = -1) -> np.ndarray:
"""Computes conformer.
Args:
smile: Smile string.
max_iter: Maximum number of iterations to perform when optimising MMFF force
field. If set to <= 0, energy optimisation is not performed.
Returns:
A tuple containing index, fi... | 15,052 |
def get_api_key():
    """Load the Mailgun API key from ``mailgun_api_key.txt`` in the working directory.

    :return: the key with surrounding whitespace stripped.
    :raises OSError: if the key file is missing or unreadable.
    """
    # Context manager guarantees the handle is closed even if read() raises,
    # which the previous open()/read()/close() sequence did not.
    with open('mailgun_api_key.txt', 'r') as api_key_file:
        return api_key_file.read().strip()
def bitserial_conv2d_strategy_hls(attrs, inputs, out_type, target):
"""bitserial_conv2d hls strategy"""
strategy = _op.OpStrategy()
layout = attrs.data_layout
if layout == "NCHW":
strategy.add_implementation(
wrap_compute_bitserial_conv2d(topi.nn.bitserial_conv2d_nchw),
w... | 15,054 |
def visit(planfile,tracefile=None) :
""" Reduce an APOGEE visit
Driver to do 3 chips in parallel
Makes median flux plots
"""
# reduce channels in parallel
chan=['a','b','c' ]
procs=[]
for channel in [1] :
kw={'planfile' : planfile, 'channel' : channel, 'clobber' : ... | 15,055 |
def get_fpga_bypass_mode(serverid):
""" Read back FPGA bypass mode setting
"""
try:
interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaread", "mode"])
return parse_get_fpga_bypass_mode(interface, "mode")
except Exception, e:
return set_failure_dict("get_fpga_bypass_mo... | 15,056 |
def lookup_complement(binding):
"""
Extracts a complement link from the scope of the given binding.
Returns an instance of :class:`htsql.core.tr.binding.Recipe`
or ``None`` if a complement link is not found.
`binding` (:class:`htsql.core.tr.binding.Binding`)
A binding node.
"""
pro... | 15,057 |
def flatten(colours):
    """Flatten the cubular (doubly nested) array into one long list."""
    return [value for plane in colours for row in plane for value in row]
def efficientnet_b6(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""EfficientNet-B6"""
# NOTE for train, drop_rate should be 0.5
# kwargs['drop_connect_rate'] = 0.2 # set when training, TODO add as cmd arg
model_name = "tf_efficientnet_b6"
default_cfg = default_cfgs[model_name]
mo... | 15,059 |
def env_revert_setup_parser(subparser):
    """restore environments to their state before update"""
    # Positional argument naming the environment to revert.
    subparser.add_argument(
        metavar='env', dest='env',
        help='name or directory of the environment to activate'
    )
    # Attach the standard -y/--yes-to-all confirmation flag shared by spack commands.
    spack.cmd.common.arguments.add_common_arguments(subparser, ['yes_to_all'])
def create_word_search_board(number: int):
"""
This function creates a numpy array of zeros, with dimensions of
number x number, which is set by the user. The array is then
iterated through, and zeros are replaced with -1's to avoid
confusion with the alphabet (A) beginning at 0.
"""
board ... | 15,061 |
def combine_station_data(station, input_dir, temp_dir):
"""
This function combines data for a given station across multiple
realizations, writting a single output file in temp_dir
"""
data = {}
# Get realizations
realizations = sorted(os.listdir(input_dir))
for realization in realization... | 15,062 |
def test_register_sequence_decl_path2(collector, sequence_decl):
    """Test handling wrong path : too many ':'.
    """
    tb = {}
    # A valid view path has a single ':' separating module path from class
    # name; this one contains two, so registration should fail.
    sequence_decl.view = 'exopy_pulses.sequences:sequences:Sequence'
    sequence_decl.register(collector, tb)
    # On failure the traceback dict is keyed by the declaration id.
    assert 'exopy_pulses.BaseSequence' in tb
def get_parent_choices(menu, menu_item=None):
"""
Returns flat list of tuples (possible_parent.pk, possible_parent.caption_with_spacer).
If 'menu_item' is not given or None, returns every item of the menu. If given, intentionally omit it and its descendant in the list.
"""
def get_flat_tuples(menu_i... | 15,064 |
def test_get_mli_type(mli_score):
    """Ensure correct type is returned."""
    # Round-trip: serialize the fixture, then ask Score to infer the
    # concrete score class from the dumped representation.
    typ = Score.get_type(mli_score.dump())
    assert typ == MLIScore
def _update_jacobian(state, jac):
"""
we update the jacobian using J(t_{n+1}, y^0_{n+1})
following the scipy bdf implementation rather than J(t_n, y_n) as per [1]
"""
J = jac(state.y0, state.t + state.h)
n_jacobian_evals = state.n_jacobian_evals + 1
LU = jax.scipy.linalg.lu_factor(state.M - ... | 15,066 |
def find_matches(article, is_saved=False):
"""
Tries to find connecting articles from the related links provided by google
:param is_saved: bool
:param article: Article
:return:
"""
print("Trying to match " + str(len(article.relatedLinks)) + " articles for " + article.title)
for link in ... | 15,067 |
def reduce_pad(sess: tf.Session, op_tensor_tuple: Tuple[Op, List[tf.Tensor]], _) -> (str, tf.Operation, tf.Operation):
"""
Pad module reducer
:param sess: current tf session
:param op_tensor_tuple: tuple containing the op to reduce, and a list of input tensors to the op
"""
name = "reduced_" + ... | 15,068 |
def parse_fastq(fh):
""" Parse reads from a FASTQ filehandle. For each read, we
return a name, nucleotide-string, quality-string triple. """
reads = []
while True:
first_line = fh.readline()
if len(first_line) == 0:
break # end of file
name = first_line[1:].rstr... | 15,069 |
def minhash_256(features):
    # type: (List[int]) -> bytes
    """
    Create 256-bit minimum hash digest.

    :param List[int] features: List of integer features
    :return: 256-bit binary from the least significant bits of the minhash values
    :rtype: bytes
    """
    # minhash() and compress() are defined elsewhere in this module; the
    # second argument presumably selects how many low bits of each minhash
    # value are packed into the 32-byte digest — TODO confirm against compress().
    return compress(minhash(features), 4)
def coalesce(*values):
    """Return the first argument that is not None, or None if all are."""
    for candidate in values:
        if candidate is not None:
            return candidate
    return None
def comparison_scatter(Xexact,Xinferred,vmax=None,color='cornflowerblue',alpha=0.05,axes=None,y=0.8):
""" This method is used to compare the inferred force components to
the exact ones along the trajectory, in a graphical way. It assumes
that the compute_accuracy method has been called before to provide
... | 15,072 |
def test06_load_various_features(variant_scalar_rgb, mesh_format, features, face_normals):
"""Tests the OBJ & PLY loaders with combinations of vertex / face normals,
presence and absence of UVs, etc.
"""
from mitsuba.core.xml import load_string
def test():
shape = load_string("""
... | 15,073 |
def euc_reflection(x, a):
"""
Euclidean reflection (also hyperbolic) of x
Along the geodesic that goes through a and the origin
(straight line)
"""
xTa = torch.sum(x * a, dim=-1, keepdim=True)
norm_a_sq = torch.sum(a ** 2, dim=-1, keepdim=True).clamp_min(MIN_NORM)
proj = xTa * a / norm_a... | 15,074 |
def is_online():
    """Check if host is online.

    Issues a HEAD request to www.google.com over HTTPS with a 1-second
    timeout; any exception (DNS failure, timeout, refused connection) is
    treated as "offline". The connection is always closed.
    NOTE(review): uses the Python 2 module name ``httplib`` — under Python 3
    this requires ``http.client``.
    """
    conn = httplib.HTTPSConnection("www.google.com", timeout=1)
    try:
        conn.request("HEAD", "/")
        return True
    except Exception:
        return False
    finally:
        conn.close()
def show_digit(x):
"""
Inputs:
x: cluster center matrix (k, p), returned by kmeans.
"""
w = 20
h = 20
col = 10
row = (x.shape[0] + col - 1) // col
plt.figure(figsize=(10, 10))
padding = row * col - x.shape[0]
if padding:
print(x.shape, padding)
x = np.vst... | 15,076 |
def text_value(s):
"""Convert a raw Text property value to the string it represents.
Returns an 8-bit string, in the encoding of the original SGF string.
This interprets escape characters, and does whitespace mapping:
- linebreak (LF, CR, LFCR, or CRLF) is converted to \n
- any other whit... | 15,077 |
def modifica_immobile_pw():
"""La funzione riceve l' ID immobile da modificare e ne modifica un attibuto scelto dall'utente """
s = input("Vuoi la lista degli immobili per scegliere il ID Immobile da modificare? (S/N)")
if s == "S" or s =="s":
stampa_immobili_pw()
s= input("Dammi ID... | 15,078 |
def run(context, port):
    """ Run the Webserver/SocketIO and app

    Stores *context* in the module-level ``ctx`` global so request handlers
    can reach it, then starts the module-level ``app`` on the given port
    (blocking call).
    """
    global ctx
    ctx = context
    app.run(port=port)
def timer(function):
    """
    Timer helper for Euler problems.

    Calls *function* with no arguments, prints its result together with the
    elapsed wall-clock time in seconds, and returns the result (the original
    docstring promised the result was returned, but the function returned
    None).

    :param function: zero-argument callable to time
    :return: whatever *function* returns
    """
    start = _timeit.default_timer()
    result = function()
    end = _timeit.default_timer()
    print(f'result: {result} ({(end-start):.2f}s)')
    return result
def hpat_pandas_series_len(self):
"""
Pandas Series operator :func:`len` implementation
.. only:: developer
Test: python -m hpat.runtests hpat.tests.test_series.TestSeries.test_series_len
Parameters
----------
series: :class:`pandas.Series`
Returns
-------
... | 15,081 |
def test_default(Class, default_in, default_out):
"""
test attribute default property
"""
attribute = Class("test", default=default_in)
assert attribute.default == default_out | 15,082 |
def arrays_not_same_size(inputs: List[np.ndarray]) -> bool:
"""Validates that all input arrays are the same size.
Args:
inputs (List[np.ndarray]): Input arrays to validate
Returns:
true if the arrays are the same size and false if they are not
"""
shapes = [i.shape for i in inputs]
... | 15,083 |
def GetReaderForFile(filename):
"""
Given a filename return a VTK reader that can read it
"""
r = vtkPNGReader()
if not r.CanReadFile(filename):
r = vtkPNMReader()
if not r.CanReadFile(filename):
r = vtkJPEGReader()
if not r.CanReadFile(filename):
... | 15,084 |
def build_figure_nn(df, non_private, semantic):
"""
Dataframe with one semantic and one model
"""
l = df.query("epsilon > 0").sort_values(["train_size", "epsilon"])
naive, low, high = get_plot_bounds(df)
fig = px.line(
l,
x="train_size",
y="accuracy",
range_y=[lo... | 15,085 |
def get_incomplete_sample_nrs(df):
""" Returns sample nrs + topologies if at least 1 algorithm result is missing """
topology_incomplete_sample_nr_map = dict()
n_samples = df.loc[df['sample_idx'].idxmax()]['sample_idx'] + 1
for ilp_method in np.unique(df['algorithm_complete']):
dfx = df[df['algo... | 15,086 |
def play(player1Factory, player2Factory):
"""Play a game of Battleships
player1Factory and player2Factory should be functions capable of turning
a Battleships instance into an Engine for their respective players
"""
game = Game()
#Construct the two players from the given Engine factorie... | 15,087 |
def _read_pos_at_ref_pos(rec: AlignedSegment,
ref_pos: int,
previous: Optional[bool] = None) -> Optional[int]:
"""
Returns the read or query position at the reference position.
If the reference position is not within the span of reference positions to which... | 15,088 |
def load_gtis(fits_file, gtistring=None):
"""Load GTI from HDU EVENTS of file fits_file."""
from astropy.io import fits as pf
import numpy as np
gtistring = _assign_value_if_none(gtistring, 'GTI')
logging.info("Loading GTIS from file %s" % fits_file)
lchdulist = pf.open(fits_file, checksum=True... | 15,089 |
def macro_australia_unemployment_rate():
"""
东方财富-经济数据-澳大利亚-失业率
http://data.eastmoney.com/cjsj/foreign_5_2.html
:return: 失业率
:rtype: pandas.DataFrame
"""
url = "http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js... | 15,090 |
def test_masked_registration_random_masks():
"""masked_register_translation should be able to register translations
between images even with random masks."""
# See random number generator for reproducible results
np.random.seed(23)
reference_image = cp.asarray(camera())
shift = (-7, 12)
... | 15,091 |
def _convert_flattened_paths(
paths: List,
quantization: float,
scale_x: float,
scale_y: float,
offset_x: float,
offset_y: float,
simplify: bool,
) -> "LineCollection":
"""Convert a list of FlattenedPaths to a :class:`LineCollection`.
Args:
paths: list of FlattenedPaths
... | 15,092 |
def _CustomSetAttr(self, sAttr, oValue):
    """ Our setattr replacement for DispatchBaseClass.

    First forwards to the original setattr (held in the module-level
    ``_g_dCOMForward`` table) using the COM-style name from ``ComifyName``;
    if the COM object rejects that name with AttributeError, retries with
    the attribute name unchanged.
    """
    try:
        return _g_dCOMForward['setattr'](self, ComifyName(sAttr), oValue)
    except AttributeError:
        return _g_dCOMForward['setattr'](self, sAttr, oValue)
def reduce_to_contemporaneous(ts):
"""
Simplify the ts to only the contemporaneous samples, and return the new ts + node map
"""
samples = ts.samples()
contmpr_samples = samples[ts.tables.nodes.time[samples] == 0]
return ts.simplify(
contmpr_samples,
map_nodes=True,
keep_... | 15,094 |
def mk_inv_part_txt_file(filename):
"""This function downloads the inventory pdf file given by 'part' and
saves it in the 'data' directory.
It also saves the retrieval time of the file.
It produces a txt file for the pdf file with pdftotext.
"""
url = ('http://www.pinakothek.de/sites/default/fil... | 15,095 |
def scenario_map_fn(
example,
*,
snr_range: tuple = (20, 30),
sync_speech_source=True,
add_speech_reverberation_early=True,
add_speech_reverberation_tail=True,
early_rir_samples: int = int(8000 * 0.05), # 50 milli seconds
details=False,
):
"""
... | 15,096 |
def init():
    """
    Initializes the local key-value store.

    Creates the module-level ``STORAGE_DIR`` directory if it does not
    already exist. Note: parent directories are not created
    (``mkdir`` is called without ``parents=True``).
    """
    STORAGE_DIR.mkdir(exist_ok=True)
def validate_checkpoint_type(checkpoint_type: str) -> None:
"""
Check that the passed `checkpoint_type` is valid.
"""
ALLOWED_CHECKPOINT_TYPES = ["state", "model"]
assert checkpoint_type in ALLOWED_CHECKPOINT_TYPES, (
f"'checkpoint_type' ('{checkpoint_type}') not understood (likely "
... | 15,098 |
def assert_same_shallow_tree(shallow, tree):
"""Asserts that `tree` has the same shallow structure as `shallow`."""
# Do a dummy multimap for the side-effect of verifying that the structures are
# the same. This doesn't catch all the errors we actually care about, sadly.
map_tree_up_to(shallow, lambda *args: ()... | 15,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.