| content (string, length 22–815k) | id (int64, 0–4.91M) |
|---|---|
def read_data_file(file_path: str, filename: str) -> Union[pd.DataFrame, str]:
"""Check read data file."""
logger.info(f"Reading {file_path}")
try:
if file_path.endswith(CSV):
return pd.read_csv(file_path, sep=",")
elif file_path.endswith(TSV):
return pd.read_csv(fil... | 11,100 |
def mnist_loader(path="../../corruptmnist", n_files=8, image_scale=255):
"""
Loads .npz corruptedmnist, assumes loaded image values to be between 0 and 1
"""
# load and stack the corrupted mnist dataset
train_images = np.vstack(
[np.load(path + "/train_{}.npz".format(str(i)))["images"] for i... | 11,101 |
def create_or_append_to_zip(file_handle, zip_path, arc_name=None):
"""
Append file_handle to given zip_path with name arc_name if given, else file_handle. zip_path will be created.
:param file_handle: path to file or file-like object
:param zip_path: path to zip archive
:param arc_name: optional fil... | 11,102 |
def reduce_min(values, index, name='segmented_reduce_min'):
    """Return the per-segment minimum of ``values`` grouped by ``index``.

    Delegates to ``_segment_reduce`` using TensorFlow's unsorted
    segment-min op as the reduction.
    """
    segment_op = tf.math.unsorted_segment_min
    return _segment_reduce(values, index, segment_op, name)
def write_exam_list(data: dict, filename: str) -> None:
"""Write a list of exam names to a json file
:param data: a dictionary containing data for all exams
:param filename: the file to write the list of exams to
:rtype: None
"""
exams = [e.name for e in data.keys()]
with open(filename, 'w'... | 11,104 |
def main():
"""Populate the Redshift database."""
config = configparser.ConfigParser()
config.read('dwh.cfg')
conn = psycopg2.connect(
'host={0} dbname={1} user={2} password={3} port={4}'.format(
*config['CLUSTER'].values(),
),
)
cur = conn.cursor()
load_staging... | 11,105 |
def _applyTargetState(targetState, md, httpclient):
"""
compares the current device state against the targetStateProvider and issues updates as necessary to ensure the
device is
at that state.
:param md:
:param targetState: the target state.
:param httpclient: the http client
:return:
... | 11,106 |
def _data_type(data_string: str):
""" convert the data type string (i.e., FLOAT, INT16, etc.) to the appropriate int.
See: https://deeplearning4j.org/api/latest/onnx/Onnx.TensorProto.DataType.html
"""
for key, val in glob.DATA_TYPES.items():
if key == data_string:
return val
_pr... | 11,107 |
def test_gruneisen_mesh(ph_nacl_gruneisen):
"""Test of mode Grueneisen parameter calculation on sampling mesh."""
ph0, ph_minus, ph_plus = ph_nacl_gruneisen
phg = PhonopyGruneisen(ph0, ph_minus, ph_plus)
phg.set_mesh([4, 4, 4])
# qpoints, weights, freqs, eigvecs, gamma = phg.get_mesh()
weights =... | 11,108 |
def bin_power(dataset, fsamp:int, band=range(0, 45)):
"""Power spec
Args:
dataset: n_epoch x n_channel x n_sample
fsamp:
band:
Returns:
n_epoch x n_channel x len(band)
"""
res = []
for i, data in enumerate(dataset):
res.append(power(data, fsamp=fsamp, ba... | 11,109 |
def deflate_and_base64_encode(string_val):
"""
Deflates and the base64 encodes a string
:param string_val: The string to deflate and encode
:return: The deflated and encoded string
"""
if not isinstance(string_val, six.binary_type):
string_val = string_val.encode('utf-8')
return bas... | 11,110 |
def smooth_reward_curve(x, y):
"""Smooths a reward curve--- how?"""
k = min(31, int(np.ceil(len(x) / 30))) # Halfwidth of our smoothing convolution
xsmoo = x[k:-k]
ysmoo = np.convolve(y, np.ones(2 * k + 1), mode='valid') / np.convolve(np.ones_like(y), np.ones(2 * k + 1), mode='valid')
downsample = max(int(np... | 11,111 |
def sdot(s):
"""Returns the time derivative of a given state.
Args:
s(1x6 numpy array): the state vector [rx,ry,rz,vx,vy,vz]
Returns:
1x6 numpy array: the time derivative of s [vx,vy,vz,ax,ay,az]
"""
mu_Earth = 398600.4405
r = np.linalg.norm(s[0:3])
a = -mu_Ear... | 11,112 |
def abort(
status_code: int,
message: t.Optional[str] = None,
detail: t.Optional[t.Any] = None,
headers: t.Optional[t.Mapping[str, str]] = None
) -> None:
"""A function to raise HTTPError exception.
Similar to Flask's `abort`, but returns a JSON response.
Examples:
```python
from ... | 11,113 |
def merge_xunit(in_files, out_file, ignore_flaky=False, quiet=False):
"""
Merges the input files into the specified output file.
:param in_files: list of input files
:param out_file: location to write merged output file
:param ignore_flaky: whether to ignore flaky test cases
:param quiet: whether to suppres... | 11,114 |
def get_values(abf,key="freq",continuous=False):
"""returns Xs, Ys (the key), and sweep #s for every AP found."""
Xs,Ys,Ss=[],[],[]
for sweep in range(abf.sweeps):
for AP in cm.matrixToDicts(abf.APs):
if not AP["sweep"]==sweep:
continue
Ys.append(AP[key])
... | 11,115 |
def test_get_invalid_individual_recipes(test_client):
"""
GIVEN a Flask application configured for testing
WHEN the '/blog/<blog_title>' page is requested (GET) with invalid blog titles
THEN check that 404 errors are returned
"""
invalid_blog_titles = ['instant_pot', 'butter', 'abcd']
for bl... | 11,116 |
def geodetic2cd(
gglat_deg_array, gglon_deg_array, ggalt_km_array, decimals=2, year=2021.0
):
"""Transformation from Geodetic (lat, lon, alt) to Centered Dipole (CD) (lat, lon, alt).
Author: Giorgio Savastano (giorgiosavastano@gmail.com)
Parameters
----------
gglon_deg_array : np.ndarray
... | 11,117 |
def load_pickle(file_path):
    """Deserialize and return the object stored in a pickle file.

    :param file_path: path of the pickle file
    :return: obj => loaded obj
    """
    with open(file_path, "rb") as handle:
        return pickle.load(handle)
def main():
    """Demonstrate softmax on a 1-D and a 2-D array of scores."""
    scores = np.array([1.0, 2.0, 3.0])
    # The original used Python 2 print statements ("print x"), which are
    # a SyntaxError under Python 3; use the print() function instead.
    print(softmax(scores))
    scores = np.array([[1, 2, 3, 6],
                       [2, 4, 5, 6],
                       [3, 8, 7, 6]])
    print(softmax(scores))
def _expand_global_features(B, T, g, bct=True):
"""Expand global conditioning features to all time steps
Args:
B (int): Batch size.
T (int): Time length.
g (Tensor): Global features, (B x C) or (B x C x 1).
bct (bool) : returns (B x C x T) if True, otherwise (B x T x C)
Retur... | 11,120 |
def get_args():
"""Get command-line arguments"""
parser = argparse.ArgumentParser(
description='Find common kmers',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('file1',
help='Input file 1',
metavar='FILE1',
... | 11,121 |
def plot_CDF(data, ax=None, reverse=False, plot=True, **plotargs):
""" plot Cumulative Ratio. """
n_samples = len(data)
X = sorted(data, reverse=reverse)
Y = np.arange(1,n_samples+1)/n_samples
if plot or ax:
if ax is None:
fig, ax = plt.subplots()
ax.plot(X, Y, **plotarg... | 11,122 |
def is_zsettable(s):
    """Quick check that all values in a dict are reals.

    The original tested ``(int, float, long)``; ``long`` exists only in
    Python 2 and raises NameError on Python 3, where ``int`` is already
    unbounded and covers the same values.

    :param s: mapping whose values are tested
    :return: True if every value is an int or float, else False
    """
    return all(isinstance(v, (int, float)) for v in s.values())
def plotMeetingGraphs(robots, index, team, path, subplot=None, length=0):
"""Plot the trajectories of all robots
Input arguments:
robots = robots which measured and moved around
index = which robots should be plotted
team = team of the robots
subplot = if we want to plot all teams
length = ... | 11,124 |
def initialize_simulator(task_ids: Sequence[str],
                         action_tier: str) -> ActionSimulator:
    """Build an ActionSimulator for the given task ids and action tier."""
    compiled_tasks = phyre.loader.load_compiled_task_list(task_ids)
    return ActionSimulator(compiled_tasks, action_tier)
async def async_unload_entry(hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry) -> bool:
"""Unload a config entry."""
_LOGGER.debug("%s: async_unload_entry", DOMAIN)
try:
all_ok = True
for platform in SUPPORTED_PLATFORMS:
_LOGGER.debug("%s - async_setup_entry: un... | 11,126 |
def emr_cluster_security_configuration_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[EMR.1] EMR Clusters should have a security configuration specified"""
response = list_clusters(cache)
myEmrClusters = response["Clusters"]
for cluster in myEmrClusters:
cl... | 11,127 |
def import_sensitivities(input, file_location):
"""
Ratio is the C/O starting gas ratio
file_location is the LSR C and O binding energy, false to load the base case
"""
tol, ratio = input
try:
data = pd.read_csv(file_location + '/all-sensitivities/' + tol + '{:.1f}RxnSensitivity.csv'.f... | 11,128 |
def extract_files_to_process(options, company_file):
"""Extract the files from the ENER zip file and the ITR/DFP inside of it,
and collect all the XML files
"""
force_download = options.get("force_download", False)
local_base_path = _doc_local_base_path(options, company_file)
# Make sure the f... | 11,129 |
def test():
"""
Set the application name
"""
# get the library
import journal
# make up a name
name = "app"
# register it
journal.application(name)
# get the chronicler's notes
notes = journal.chronicler.notes
# verify that the key is registered and has the correct valu... | 11,130 |
def pg_dump(dsn, output):
"""
Сохраняет схему БД в файл
:param dsn: Строка подключения. Например: username@localhost:5432/dname
:param output: Имя файла для сохранения DDL
:type dsn: str
:type output: str
"""
host, port, user, pwd, dbname, socket = parse_dsn(dsn)
args = [
a... | 11,131 |
def test__additive_hash(ht):
    """The _additive_hash method should map 'wriggle' to 53."""
    result = ht._additive_hash('wriggle')
    assert result == 53
def merge(source, dest):
""" Copy all properties and relations from one entity onto another, then
mark the source entity as an ID alias for the destionation entity. """
if source.id == dest.id:
return source
if dest.same_as == source.id:
return source
if source.same_as == dest.id:
... | 11,133 |
def create_summary_text(summary):
"""
format a dictionary so it can be printed to screen or written to a plain
text file
Args:
summary(dict): the data to format
Returns:
textsummary(str): the summary dict formatted as a string
"""
summaryjson = json.dumps(summary, indent=3)... | 11,134 |
def test_write_to_stats_with_no_parsed_data():
"""It should not call stats when parsing the data returned None."""
loader = ItemLoader()
loader.stats = mock.Mock()
parsed_data = None
expected_stat_key = "parser/ItemLoader/field_name/css/0/missing"
assert loader.write_to_stats("field_name", pa... | 11,135 |
def random_datetime(start, end):
    """Pick a uniformly random datetime in the closed range [`start`, `end`]."""
    # Whole-second resolution: choose a random offset within the span.
    span_seconds = int((end - start).total_seconds())
    offset = datetime.timedelta(seconds=random.randint(0, span_seconds))
    return start + offset
def maximo_basico(a: float, b: float) -> float:
    """Return the larger of two numbers.

    Constraint: the built-in ``max`` function must not be used.
    """
    return a if a > b else b
def listas_mesmo_tamanho(lista_de_listas):
"""
Recebe uma lista de listas e retorna 'True' caso todas as listas
sejam de mesmo tamanho e 'False', caso contrário
"""
tamanho_padrao = len(lista_de_listas[0])
for lista in lista_de_listas:
if(len(lista) != tamanho_padrao):
retu... | 11,138 |
def load_performance_win_x64_win_x64_vs2017_settings(conf):
"""
Setup all compiler and linker settings shared over all win_x64_win_x64_v140 configurations for
the 'performance' configuration
"""
v = conf.env
conf.load_win_x64_win_x64_vs2017_common_settings()
# Load additional shared setting... | 11,139 |
def bam2ec(bam_file, ec_file, chunks, directory, mincount, multisample, number_processes, rangefile, sample, targets, verbose):
"""
Convert a BAM file (bam_file) to a binary EC file (ec_file)
"""
utils.configure_logging(verbose)
if multisample:
if sample:
print('-s, --sample shou... | 11,140 |
def compute_all_mordred_descrs(mols, max_cpus=None, quiet=True):
"""
Compute all Mordred descriptors, including 3D ones
Args:
mols: List of RDKit mol objects for molecules to compute descriptors for.
max_cpus: Max number of cores to use for computing descriptors. None means use all availab... | 11,141 |
def self_quarantine_policy_40():
"""
Real Name: b'self quarantine policy 40'
Original Eqn: b'1-PULSE(self quarantine start 40, self quarantine end 40-self quarantine start 40)*self quarantine effectiveness 40'
Units: b'dmnl'
Limits: (None, None)
Type: component
b''
"""
return 1 - fu... | 11,142 |
def fetch_mate_variant_record(vcfhandle, chr_mate, pos_mate, mateid, count=0, slop=50):
"""
We fetch the MateID variant Record for the breakend being process
:param vcfhandle:
:param chr_mate:
:param pos_mate:
:param mateid: must be a string and not a tuple
:param count: Normally the mate_r... | 11,143 |
def knapsack_bqm(cities, values, weights, total_capacity, value_r=0, weight_r=0):
"""
build the knapsack binary quadratic model
From DWave Knapsack examples
Originally from Andrew Lucas, NP-hard combinatorial problems as Ising spin glasses
Workshop on Classical and Quantum Optimization; ETH Zue... | 11,144 |
def test_build_from_args_no_hit(config_file, random_dt, script_path, new_config):
"""Try building experiment when not in db"""
cmdargs = {
"name": "supernaekei",
"config": config_file,
"user_args": [script_path, "x~uniform(0,10)"],
}
with OrionState(experiments=[], trials=[]):
... | 11,145 |
def nltk_punkt_de(data: List[str], model=None) -> List[str]:
"""Sentence Segmentation (SBD) with NLTK's Punct Tokenizer
Parameters:
-----------
data : List[str]
list of N documents as strings. Each document is then segmented
into sentences.
model (Default: None)
Preloaded... | 11,146 |
def usage():
"""Prints usage to the screen"""
print """
-------------------------------------------------------------------------------
Author: Kyle Hernandez <khernandez@bsd.uchicago.edu>
Description: Gets the top 90th percentile high-quality variants from a VCF file
and outputs them to a new VCF file. This m... | 11,147 |
def is_pattern_error(exception: TypeError) -> bool:
    """Detect whether the input exception was caused by an invalid type
    passed to `re.search`.

    Python 3.11+ appends the offending type to the message
    ("expected string or bytes-like object, got 'int'"), so match on the
    stable prefix instead of exact string equality.
    """
    # This is intentionally simplistic and does not involve any traceback analysis.
    return str(exception).startswith("expected string or bytes-like object")
def execute_query(bq_client: bigquery.Client,
env_vars: Dict[str, Union[str, bool]],
query_path: object,
output_table_name: str,
time_partition: bool) -> None:
"""Executes transformation query to a new destination table.
Args:
bq_client: bigquery.Client object
env_vars: Dictiona... | 11,149 |
def textrank(articles, encoders, reduction_methods, reduction_methods_params):
"""
Description: Similarity between any two sentences is used as an equivalent to the web page transition probability
"""
for enc_name, enc_model in encoders.items():
# load sentence encoder
print(enc_name)
... | 11,150 |
def redirect(reluri):
    """Instruct the client to redirect to the supplied relative URI.

    @param reluri: relative URI to redirect to
    """
    destination = base_uri() + '/' + reluri
    raise HTTPRedirect(destination)
def iwave_modes_banded(N2, dz, k=None):
"""
!!! DOES NOT WORK!!!
Calculates the eigenvalues and eigenfunctions to the internal wave eigenvalue problem:
$$
\left[ \frac{d^2}{dz^2} - \frac{1}{c_0} \bar{\rho}_z \right] \phi = 0
$$
with boundary conditions
"""
nz = N2.shape[0... | 11,152 |
def parse_fastq_pf_flag(records):
"""Take a fastq filename split on _ and look for the pass-filter flag
"""
if len(records) < 8:
pf = None
else:
fastq_type = records[-1].lower()
if fastq_type.startswith('pass'):
pf = True
elif fastq_type.startswith('nopass'):
... | 11,153 |
def test_process_reverse_polarity():
"""algorithm_test.AdjustedAlgorithm_test.test_process()
Check adjusted data processing versus files generated from
original script. Tests reverse polarity martix.
"""
# load adjusted data transform matrix and pier correction
a = adj(
statefile="etc/a... | 11,154 |
def rule_manager():
    """Pytest fixture: a RuleManager wired with a no-op IgnoreFilter."""
    return RuleManager(None, IgnoreFilter(None, verbose=False), verbose=False)
def zeros(fn, arr, *args):
"""
Find where a function crosses 0. Returns the zeroes of the function.
Parameters
----------
fn : function
arr : array of arguments for function
*args : any other arguments the function may have
"""
# the reduced function, with only the argument to be s... | 11,156 |
def create_low_latency_conv_model(fingerprint_input, model_settings,
is_training):
"""Builds a convolutional model with low compute requirements.
This is roughly the network labeled as 'cnn-one-fstride4' in the
'Convolutional Neural Networks for Small-footprint Keyword Spotting'... | 11,157 |
def get_args(description: str = "YouTube") -> argparse.Namespace:
"""
Retrieve parsed arguments as a Namespace.
Parameters
----------
description : str
Description given to ArgumentParser.
Returns
-------
args : argparse.Namespace
Namespace with arguments specified.
... | 11,158 |
def _DropEmptyPathSegments(path):
"""Removes empty segments from the end of path.
Args:
path: A filesystem path.
Returns:
path with trailing empty segments removed. Eg /duck/// => /duck.
"""
while True:
(head, tail) = os.path.split(path)
if tail:
break
path = head
return path | 11,159 |
def _format_author(url, full_name):
""" Helper function to make author link """
return u"<a class='more-info' href='%s'>%s</a>" % (url, full_name) | 11,160 |
def all_series(request: HttpRequest) -> JsonResponse:
"""
View that serves all the series in a JSON array.
:param request: The original request.
:return: A JSON-formatted response with the series.
"""
return JsonResponse([
_series_response(request, s)
for s in get_response(requ... | 11,161 |
def test_foci():
"""Test plotting of foci
"""
mlab.options.backend = 'test'
brain = Brain(*std_args)
coords = [[-36, 18, -3],
[-43, 25, 24],
[-48, 26, -2]]
brain.add_foci(coords, map_surface="white", color="gold")
annot_path = pjoin(subj_dir, subject_id, 'label', 'lh.apa... | 11,162 |
def stlx_powerset(s):
"""If s is a set, the expression pow(s) computes the power set of s. The power set of s is
defined as the set of all subsets of s."""
def powerset_generator(i):
for subset in it.chain.from_iterable(it.combinations(i, r) for r in range(len(i)+1)):
yield set(subset)... | 11,163 |
def get_entries_configuration(data):
"""Given the dictionary of resources, returns the generated factory xml file
Args:
data (dict): A dictionary similar to the one returned by ``get_information``
Returns:
str: The factory xml file as a string
"""
entries_configuration = ""
for... | 11,164 |
def validinput(x0, xf, n):
"""Checks that the user input is valid.
Args:
x0 (float): Start value
xf (float): End values
n (int): Number of sample points
Returns:
False if x0 > xf or if
True otherwise
"""
valid = True
if x0 > xf:
valid = False
... | 11,165 |
def multi_class_bss(predictions: np.ndarray, targets: np.ndarray) -> float:
"""
Brier Skill Score:
bss = 1 - bs / bs_{ref}
bs_{ref} will be computed for a model that makes a predictions according to the prevalance of each class in dataset
:param predictions: probability score. Expected Shape [N, C... | 11,166 |
def get_arxiv_id_or_ascl_id(result_record):
"""
:param result_record:
:return:
"""
identifiers = result_record.get("identifier", [])
for identifier in identifiers:
if "arXiv:" in identifier:
return identifier.replace("arXiv:", "")
if "ascl:" in identifier:
... | 11,167 |
def get_query_claim_similarities(
sim: Mapping[Tuple[str, int], float],
softmax: bool,
) -> Mapping[Tuple[str, int], float]:
"""
Preprocess query claim similarities.
:param sim:
A mapping from (premise_id, claim_id) to the logits of the similarity model, shape: (2,).
:param softmax:
... | 11,168 |
def regexp(options: dict):
"""
Apply a regexp method to the dataset
:param options: contains two values:
- find: which string should be find
- replace: string that will replace the find string
"""
def apply_regexp(dataset, tag):
"""
Apply a regexp to the dataset
... | 11,169 |
def RunUnitTests():
    """Runs every registered unit-test module via a ModuleTestRunner."""
    runner = module_test_runner.ModuleTestRunner()
    runner.modules = [document_test, model_test, ops_test,
                      robot_abstract_test, util_test]
    runner.RunAllTests()
def do_one_subject(sub_curr, params, verbose=False):
"""
launch sessions processing for sub_curr
parameters:
-----------
sub_curr: dict
contains subject base directory
contains subject index
params: dict
parameters for layout, data and analysis
... | 11,171 |
def write_to_excel(sheet_one_names, sheet_two_names, url, url2, destfile):
"""
Write to destination excel
"""
for i in sheet_one_names:
data = pd.read_excel(url, sheet_name=i)
with pd.ExcelWriter(destfile, engine="openpyxl", mode="a") as writer:
data.to_excel(writer, index=F... | 11,172 |
def comp_skin_effect(self, freq, T_op=20, T_ref=20, type_skin_effect=1):
"""Compute the skin effect factor for the conductor
Parameters
----------
self : Conductor
an Conductor object
freq: float
electrical frequency [Hz]
T_op: float
Conductor operational temperature [de... | 11,173 |
def insert_point_into_G(G_, point, node_id=100000, max_distance_meters=5,
nearby_nodes_set=set([]), allow_renaming=True,
verbose=False, super_verbose=False):
"""
Insert a new node in the graph closest to the given point.
Notes
-----
If the point is to... | 11,174 |
def draw_bbox(img, ymin, xmin, ymax, xmax, color, str_list=()):
"""draw bounding box over an image."""
font = ImageFont.truetype("/workspace/fonts/JosefinSans-SemiBold.ttf", 25)
draw = ImageDraw.Draw(img)
width, height = img.size
left, right = xmin * width, xmax * width
top, bottom = ymin * hei... | 11,175 |
def find_paths(initial_path, extension):
"""
From a path, return all the files of a given extension inside.
:param initial_path: the initial directory of search
:param extension: the extension of the files to be searched
:return: list of paths inside the initial path
"""
paths = glob.glob(... | 11,176 |
def ridge_line(df_act, t_range='day', n=1000):
"""
https://plotly.com/python/violin/
for one day plot the activity distribution over the day
- sample uniform from each interval
"""
df = activities_dist(df_act.copy(), t_range, n)
colors = n_colors('rgb(5, 200, 200)', 'rgb(200, 10, 10... | 11,177 |
def transfer(
name: str,
start: Optional[datetime.datetime],
interval: str,
source_endpoint: str,
dest_endpoint: str,
label: Optional[str],
stop_after_date: Optional[datetime.datetime],
stop_after_runs: Optional[int],
sync_level: Optional[int],
encrypt_data: bool,
verify_chec... | 11,178 |
def merge_og_files():
""" Function to open, crop and merge the APHRODITE data """
ds_list = []
extent = ls.basin_extent('indus')
print('1951-2007')
for f in tqdm(glob.glob(
'_Data/APHRODITE/APHRO_MA_025deg_V1101.1951-2007.gz/*.nc')):
ds = xr.open_dataset(f)
ds = ds.rena... | 11,179 |
def op(name,
value,
display_name=None,
description=None,
collections=None):
"""Create a TensorFlow summary op to record data associated with a particular the given guest.
Arguments:
name: A name for this summary operation.
guest: A rank-0 string `Tensor`.
display_n... | 11,180 |
def select_only_top_n_common_types(dataset: pd.DataFrame, n: int = 10) -> pd.DataFrame:
"""
First find the most popular 'n' types. Remove any uncommon types from the
dataset
:param dataset: The complete dataset
:param n: The number of top types to select
:return: Return the dataframe once the t... | 11,181 |
def renew_cached_query_task(pk: int):
"""
Renews `CachedQuery` object identified by primary key `pk`
:param pk: primary key
"""
try:
cq = CachedQuery.objects.get(pk=pk)
except CachedQuery.DoesNotExist:
logger.debug('renew_cached_query: CachedQuery object not found: #%s', pk)
... | 11,182 |
def load_checkpoint(filename='checkpoint.pth.tar'):
    """Load a checkpoint for general purpose (e.g., resume training).

    Returns the deserialized state, or None when the checkpoint file
    does not exist under CHECKPOINTS_PATH.
    """
    full_path = os.path.join(CHECKPOINTS_PATH, filename)
    print(full_path)
    if os.path.isfile(full_path):
        return torch.load(full_path)
    return None
def __one_both_closed(x, y, c=None, l=None):
    """Convert coordinates to zero-based, both strand, open/closed coordinates.

    Parameters are from, to, is_positive_strand, length of contig
    (the latter two are accepted but unused here).
    """
    return (x - 1, y)
def get_corners(img, sigma=1, alpha=0.05, thresh=1000):
""" Returns the detected corners as a list of tuples """
ret = []
i_x = diff_x(img)
i_y = diff_y(img)
i_xx = ndimage.gaussian_filter(i_x ** 2, sigma=sigma)
i_yy = ndimage.gaussian_filter(i_y ** 2, sigma=sigma)
i_xy = ndimage.gaus... | 11,185 |
def add_param_starts(this_starts, params_req, global_conf, run_period_len, start_values_min, start_values_max):
"""Process the param starts information taken from the generator, and add it to
the array being constructed.
Inputs:
this_starts: a tuple with (starts_min, starts_max), the outpu... | 11,186 |
def pareto(data, name=None, exp=None, minval=None, maxval=None, **kwargs):
"""the pareto distribution: val ~ val**exp | minval <= val < maxval
"""
assert (exp is not None) and (minval is not None) and (maxval is not None), \
'must supply exp, minval, and maxval!' ### done to make command-line argume... | 11,187 |
def codegen_reload_data():
"""Parameters to codegen used to generate the fn_ansible_tower package"""
reload_params = {"package": u"fn_ansible_tower",
"incident_fields": [],
"action_fields": [u"ansible_tower_arguments", u"ansible_tower_credential", u"ansible_tower_hosts",... | 11,188 |
def get_default_identity(username, provider=None):
"""
Return the default identity given to the user-group for provider.
"""
try:
filter_query = {}
if provider:
filter_query['provider'] = provider
from core.models.group import GroupMembership
memberships = Gro... | 11,189 |
def add_db(vdb:VariantsDb) -> None:
    """Add that db to settings, connections, and activate it.

    Marks *vdb* active, persists it through the app's SQLAlchemy session,
    then mirrors it into the runtime settings.

    :param vdb: the VariantsDb record to activate
    """
    # Imported locally rather than at module level — presumably to avoid a
    # circular import at load time; TODO confirm against varappx.handle_init.
    from varappx.handle_init import db
    vdb.is_active = 1
    db.session.add(vdb)
    db.session.commit()  # persist the activation before touching settings
    add_db_to_settings(vdb.name, vdb.filename)
def get_text_item(text):
"""Converts a text into a tokenized text item
:param text:
:return:
"""
if config['data']['lowercased']:
text = text.lower()
question_tokens = [Token(t) for t in word_tokenize(text)]
question_sentence = Sentence(' '.join([t.text for t in question_tokens]), q... | 11,191 |
def Ltotal(scatter: bool):
    """Graph for computing 'Ltotal'.

    Starts from the beamline graph; in the scattering case the
    'two_theta' node is removed before returning.
    """
    graph = beamline(scatter=scatter)
    if scatter:
        del graph['two_theta']
    return graph
def gen_dir(download_dir, main_keyword):
    """Helper function | generates a directory where pics will be downloaded.

    Falls back to './data/' when download_dir is falsy; the keyword becomes
    a subdirectory. The returned path always ends with '/'.
    """
    base = download_dir or './data/'
    img_dir = base + main_keyword + '/'
    if not os.path.exists(img_dir):
        os.makedirs(img_dir)
    return img_dir
def is_valid_page_to_edit(prev_pg_to_edit, pg_to_edit):
"""Check if the page is valid to edit or not
Args:
prev_pg_to_edit (obj): page to edit object of previous page
pg_to_edit (obj): page to edit object of current page
Returns:
boolean: true if valid else false
"""
try:
... | 11,194 |
def get_diffs(backups, backup_id, partner_backups, bound=10):
"""
Given a list `backups`, a `backup_id`, and `bound`
Compute the a dict containing diffs/stats of surronding the `backup_id`:
diff_dict = {
"stats": diff_stats_list,
"files": files_list,
"partners": partner_files... | 11,195 |
def admits_voc_list(cid: CID) -> List[str]:
    """Collect every node of *cid* with positive value of control."""
    matching = []
    for node in list(cid.nodes):
        if admits_voc(cid, node):
            matching.append(node)
    return matching
def test_medicinalproductpackaged_1(base_settings):
"""No. 1 tests collection for MedicinalProductPackaged.
Test File: medicinalproductpackaged-example.json
"""
filename = (
base_settings["unittest_data_dir"] / "medicinalproductpackaged-example.json"
)
inst = medicinalproductpackaged.Med... | 11,197 |
def __load_config_file(file_name):
"""
Loads varibles and constants from yalm config file and turns them into module's global variables
:param filename: str, config file name
:return: None
"""
with open(file_name) as f:
data_map = yaml.safe_load(f)
f.close()
globals().update(... | 11,198 |
def build_lib() -> None:
    """Build the package by shelling out to nbdev_build_lib."""
    command = ["nbdev_build_lib"]
    subprocess.run(command)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.