content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def cdfRosconi(cdfThickness=np.linspace(0,1,1000),
alpha=1.71e11, beta=8.17, gamma=55.54):
"""
TODO: Not Yet Implemented
* Input to this function has units of mm for default parameters.
** Default values of alpha, beta and gamma derived from:
Rosconi et al. Quantitative appro... | 13,300 |
def compute_error_model(model_metadata, X_test, y_test, target,error_metric):
"""Computes the model MRR based on test data
:param model_metadata: a dictionary containing metadata about a model
:param X_test: a dataframe containing features specfic to the model being evaluated
:param y_test: a datafra... | 13,301 |
def column(df, s, column) -> ReturnType:
    """Gets the series of the column named `column`

    Selects rows `s` of that column, converts the result to a numpy
    array, and pairs it with a zero status flag.
    """
    values = df.loc[s, column].to_numpy()
    return values, 0
def directory_hash(root_path, verbose, hash_format, ignore_list, ignore_spec_file):
"""
[TMP] Creates the directory hash of a given folder
"""
if not os.path.isabs(root_path):
root_path = os.path.join(os.getcwd(), root_path)
# store the directory hashes of sub folders so we can use it when ... | 13,303 |
def bubble_up(heap: List[Node], index: int) -> None:
"""Fixes the binary heap after an insertion
When an insertion happens, the binary heap may be violated, with a smaller
value at the leaves. Bubble up will make the smaller value "surface" on the
heap and preserve the heap invariant.
Parameters
... | 13,304 |
def main():
    """Main entry point: run the ZeroR learner through the `run` harness."""
    # NOTE(review): `run` and `ZeroR` come from elsewhere in the project;
    # presumably run.Run executes the given learner -- confirm at call site.
    run.Run(ZeroR) | 13,305 |
def features_to_id(features, intervals):
    """Convert a list of features into an index using the spacings
    provided in ``intervals``.

    The index is the dot product of ``features`` and ``intervals``,
    shifted by one so that index 0 can correspond to the null molecule.

    Args:
        features: sequence of integer feature values.
        intervals: sequence of integer spacings, one per feature.

    Returns:
        int: the 1-based combined index.
    """
    # Dot product gives the flat index; +1 reserves 0 for the null molecule.
    # (Replaces an index loop that also shadowed the builtin `id`.)
    return sum(f * step for f, step in zip(features, intervals)) + 1
def index_initial(n_batch, n_ch, tensor=True):
"""Tensor batch and channel index initialization.
Args:
n_batch (Int): Number of batch.
n_ch (Int): Number of channel.
tensor (bool): Return tensor or numpy array
Returns:
Tensor: Batch index
... | 13,307 |
def which(binary_name, pathvar=None):
""" Deduces the path corresponding to an executable name,
as per the UNIX command `which`. Optionally takes an
override for the $PATH environment variable.
Always returns a string - an empty one for those
executables that cannot be found.
"""... | 13,308 |
def cpc_autostart_group():
    """
    Command group for managing the auto-start list of a CPC (in DPM mode).

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """ | 13,309 |
def load_titanic(test_size=0.2, random_state=1, cache_dir=None, cache_subdir='datasets'):
""" load titanic database """
path = find_path(DatasetEnum.titanic, cache_dir=cache_dir, cache_subdir=cache_subdir)
df = pd.read_csv(path, sep=",", na_values=["?"], keep_default_na=True)
# Shuffle DF and compute ... | 13,310 |
def mock_function_fail(*args, **kwargs):
    """Stand-in for a failing callable.

    Prints a trace of the call, then returns the failure code 1.
    """
    banner = "\nmock> f({}) ==> 1".format(args)  # pragma: no cover
    print(banner)  # pragma: no cover
    return 1  # pragma: no cover
def get_field_result(client_id, field_id, count=1):
"""
на входе: id-поля, id-карты,
выход: последний результат поля
:return:
"""
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT directions_napravleniya.client_id, directions_issledovaniya.napravleniy... | 13,312 |
def write_pipeline_files(
output_dir: str,
labels_filename: str,
config_info_list: List[ConfigFileInfo],
inference_params: Dict[str, Any],
items_for_inference: ItemsForInference,
):
"""Writes the config files and scripts for manually running pipeline."""
# Use absolute path for all files th... | 13,313 |
def parse_basic_profile_forms():
"""Parses and validates basic profile forms in the request.
Returns:
A dictionary containing user profile.
Raises:
ValueError: When validation failed.
"""
return {
'display_name': get_form_string('display_name', 32),
'contact_email':... | 13,314 |
def test_lines():
"""Test that the correct line numbers are given in a chrome.manifest."""
c = chrome_manifest("""
zero foo bar
one bar foo
two abc def
#comment
four def abc
""".strip())
assert next(c.get_entries('zero'))['line'] == 1
assert next(c.get_entri... | 13,315 |
def main():
"""Main entrance for training"""
args = parser.parse_args()
print(sys.argv)
context.set_context(mode=context.GRAPH_MODE)
if args.GPU:
context.set_context(device_target='GPU')
# parse model argument
assert args.model.startswith(
"tinynet"), "Only Tinynet models ... | 13,316 |
def fixture_success(request):
"""
Test Cases:
1. Hitting uncovered route as base user (logged in flow). Will return 200
since uncovered route is an open endpoint and thus Anonymous users can also
access it.
2. Hitting uncovered route as base user and HEAD request
3. Hitting uncovered route a... | 13,317 |
def get_quote_name(quote_number: int) -> Optional[str]:
""" used to help applications look up quote names based on the number
users.
"""
assert type(quote_number) in (int, type(None))
if quote_number is None:
return None
for key, value in csv.__dict__.items():
if value == q... | 13,318 |
def calc_floodzone(row):
"""Extracts the FEMAZONE of an SFHA based on each row's attributes.
This function acts on individual rows of a pandas DataFrame using
the apply built-in.
Parameters
----------
row : Pandas Series
A row of a pandas DataFrame
Returns
-------
str
... | 13,319 |
def trans_you(ori_image, img_db, target_size=(8, 8)):
"""Transfer original image to composition of images.
Parameters
----------
ori_image : numpy.ndarray
the original image
img_db : h5py.File
image datasets
target_size : tuple
Returns
-------
res_img : numpy.ndarra... | 13,320 |
def new_assessment():
"""
RESTful CRUD controller to create a new 'complete' survey
- although the created form is a fully custom one
"""
# Load Model
table = s3db.survey_complete
s3db.table("survey_series")
def prep(r):
if r.interactive:
viewing = get_vars.... | 13,321 |
async def random_pokemon(connection: asyncpg.Connection, /) -> types.Pokemon:
    """Returns a random :class:`types.Pokemon`.

    :param connection: open asyncpg connection (positional-only).
    :return: one row chosen uniformly at random, materialised through the
        private ``_pokemon`` helper.
    """
    # Fetches every row and then samples one in Python; fine for a small
    # table. NOTE(review): for large tables consider sampling in SQL.
    records = await tables.Pokemon.fetch(connection)
    return await _pokemon(connection, random.choice(records)) | 13,322 |
def remove_space(text):
    """Collapse runs of whitespace into single spaces and trim the ends.

    :param str text: text to process
    :returns: the normalised string
    """
    collapsed = re.sub(r"\s+", " ", text)
    return collapsed.strip()
def getFilePathBase():
"""
获取请求url文件的文件路径
:return: php->base64 code
"""
code = """
@ini_set("display_errors","0");
@set_time_limit(0);
@set_magic_quotes_runtime(0);
header("Content-Type:application/json");
$res = array();$res["path"] = dirname(__FILE__);
echo ("<ek>");
echo jso... | 13,324 |
def extractHeldSimple(q, factoryConfig=None):
"""All Held Glideins: JobStatus == 5
q: dictionary of Glideins from condor_q
factoryConfig (FactoryConfig): Factory configuartion (NOT USED, for interface)
Returns:
dict: dictionary of Held Glideins from condor_q
"""
# Held==5
... | 13,325 |
def neighbors(stats1, stats2, max_val=1e5):
"""stats from cv.connectedComponentsWithStats."""
pts1 = np.concatenate(
(stats1[:, :2], stats1[:, :2] + stats1[:, 2:4]), axis=0)
pts2 = np.concatenate(
(stats2[:, :2], stats2[:, :2] + stats2[:, 2:4]), axis=0)
dist = np.abs(pts1[:, None] - pts... | 13,326 |
def advect(gridc, gridx, gridy, scalars, ibmf, ibmx, ibmy, velc, options):
"""
"""
nx, ny = gridc.nx, gridc.ny
dx, dy = gridc.dx, gridc.dy
dt = scalars.dt
lset_iter = options['lset_redistance']
extrap_iter = options['extrap_solid']
u = gridx[velc][0,0,:,:].transpose()
v = gridy[v... | 13,327 |
def test_destroy_as_user(user, short_link):
    """A logged-in user should not be able to delete a short link.

    NOTE(review): the API presumably answers 404 (rather than 403) to hide
    the resource's existence from non-privileged users -- confirm intent.
    """
    client = get_api_client(user=user)
    url = _get_short_link_url(short_link)
    response = client.delete(url)
    assert response.status_code == status.HTTP_404_NOT_FOUND | 13,328 |
def shortest_substring_containing_characters(text: str, char_set: set) -> Optional[str]:
"""
O(n) & O(k)
"""
start = 0
end = -1
count_char = defaultdict(int) # char and its count
found_set = set()
for index, char in enumerate(text):
if char in char_set:
count_char[c... | 13,329 |
def refine_grid(
grid,
cb,
grid_additions=(50, 50),
ntrail=2,
blurs=((), ()),
metric=None,
atol=None,
rtol=None,
extremum_refinement=None,
snr=False,
):
"""Refines an existing grid by adding points to it.
Parameters
----------
grid : array
cb : callbable
... | 13,330 |
def last_week(today: datetime=None, tz=None):
"""
Returns last week begin (inclusive) and end (exclusive).
:param today: Some date (defaults current datetime)
:param tz: Timezone (defaults pytz UTC)
:return: begin (inclusive), end (exclusive)
"""
if today is None:
today = datetime.ut... | 13,331 |
def get_velocity_limits():
"""
"""
velocity_limits = {}
for i in range(6):
try:
velocity_limits['a{}'.format(i+1)] = float(pm.textField(
't_A{}vel'.format(i+1),
q=True,
... | 13,332 |
def adjust_learning_rate(learning_rate, optimizer, epoch):
    """Set the learning rate to the initial LR decayed by 10 every 25 epochs.

    NOTE(review): the previous docstring said "every 30 epochs", but the
    code decays with ``epoch // 25``; the docstring now matches the code.

    Args:
        learning_rate (float): initial learning rate.
        optimizer: optimizer whose ``param_groups`` are updated in place.
        epoch (int): current epoch number.
    """
    lr = learning_rate * (0.1 ** (epoch // 25))
    print(lr)
    # Apply the same rate to every parameter group.
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
def when_click_nth_element(context, position, ordinal, words, element_name):
""" Tells the browser to click on the nth element within the element of the given identifier.
::
When the user clicks on the 2nd Entry in the "Table"
"""
logger.info(f'Clicking on {position}{ordinal} "{words}" of the e... | 13,334 |
def decode_item_length(encoded_data: Bytes) -> int:
"""
Find the length of the rlp encoding for the first object in the
encoded sequence.
Here `encoded_data` refers to concatenation of rlp encoding for each
item in a sequence.
NOTE - This is a helper function not described in the spec. It was
... | 13,335 |
def pxor(a1, a2, fmt=None):
    """Bitwise XOR of two values, rendered through ``c2repr``.

    Both operands are first normalised with ``_inconv``; ``fmt`` is passed
    through to ``c2repr`` to control the output representation.
    """
    lhs = _inconv(a1)
    rhs = _inconv(a2)
    return c2repr(lhs ^ rhs, fmt)
def ensure_dir_exists(path):
"""Ensure that the directory tree to the path exists."""
split_path = os.path.splitext(path)
if split_path[0] != path: # path is file
make_path = os.path.dirname(split_path[0])
else: # path is dir
make_path = split_path[0]
os.makedirs(make_path, exist_o... | 13,337 |
def milestone_2_test_1_initial_val(lattice_grid_shape: Tuple[int, int]) -> Tuple[np.ndarray, np.ndarray]:
"""
Return initial conditions
Args:
lattice_grid_shape: lattice grid [lx, ly]
Returns:
density with 0.5, but one peak in the middle, velocities 0
"""
density = np.ones(lat... | 13,338 |
def test_element_html_call_get_attribute(monkeypatch, browser_driver):
"""Calls el_or_xpath WebElement attr get_attribute"""
called = []
class FakeWebElement:
def get_attribute(self, val):
called.append(('get_attribute', val))
return 42
@browser_driver.register
cla... | 13,339 |
def moving_dictators(session, system_ids):
"""
Show newly controlling dictators in the last 5 days.
Show all controlling dictators in monitored systems.
Subqueries galore, you've been warned.
Returns: A list of messages to send.
"""
gov_dic = session.query(Government.id).\
filter(G... | 13,340 |
def get_alt_for_q_with_constant_mach(q, mach, tol=5., SI=False, nmax=20):
# type: (float, float, float, bool, int) -> float
"""
Gets the altitude associated with a dynamic pressure.
Parameters
----------
q : float
the dynamic pressure lb/ft^2 (SI=Pa)
mach : float
the mach to... | 13,341 |
def buy(amount, token, price, account):
"""Put a buy-order for a token to the hive-engine market
"""
stm = shared_blockchain_instance()
if stm.rpc is not None:
stm.rpc.rpcconnect()
if not stm.is_hive:
print("Please set a Hive node")
return
if account is None:
acc... | 13,342 |
def addMetadataFlags(metadataChunk, numberOfMetadataChunks):
"""Adds binary flag the number of metadata chunks this upload has (uint8).
Arguments:
metadataChunk {bytes} -- First metadata chunk already encrypted, but before signing.
numberOfMetadataChunks {int} -- Self-explanatory.
Returns:
bytes -- Metadat... | 13,343 |
def save_object(obj, file_name, pickle_format=2):
"""Save a Python object by pickling it.
Unless specifically overridden, we want to save it in Pickle format=2 since this
will allow other Python2 executables to load the resulting Pickle. When we want
to completely remove Python2 backward-compatibility,... | 13,344 |
def table(a):
    """Get a tabular view of `a` if available, else return `a` unchanged.

    :param a: any object; only objects recognised by ``misc.istablarray``
        are converted.
    :returns: ``a.__view__('table')`` for tablarray objects, otherwise ``a``.
    """
    if misc.istablarray(a):
        return a.__view__('table')
    return a | 13,345 |
def parse_quadrupole(line):
"""
Quadrupole (type 1)
V1: zedge
V2: quad gradient (T/m)
V3: file ID
If > 0, then include fringe field (using Enge function) and
V3 = effective length of quadrupole.
V4: radius (m)
V5: x misalignment error (m)
V6: y misalignment error (m)... | 13,346 |
def cases_vides(pave):
"""fonction qui cherche toutes les cases vides ayant des cases adjacentes
pleines dans un pavé (où pavé est un tableau de tuiles ou de cases vides)
retourne le tableau contenant les positions de ces cases vides et les
cases adjacentes en fonction de leur position"""
result = [... | 13,347 |
def _create_scalar_tensor(vals, tensor=None):
    """Create a tensor from scalar data.

    A bare scalar is wrapped in a 1-tuple before conversion so that the
    resulting numpy array is always at least one-dimensional.
    """
    values = vals if isinstance(vals, (tuple, list)) else (vals,)
    return _create_tensor(np.array(values), tensor)
def is_importable(name):
    """ Determines if a given package name can be found.

    :param str name: The name of the package
    :returns: True if the package can be found
    :rtype: bool
    """
    try:
        # find_spec returns None (not an exception) for a missing top-level
        # name, but raises ModuleNotFoundError for 'pkg.sub' when 'pkg'
        # itself is absent, and ValueError for malformed names.
        return importlib.util.find_spec(name) is not None
    except (ImportError, ValueError):
        return False
def make_pipeline(*steps, **kwargs):
"""Construct a Pipeline from the given estimators.
This is a shorthand for the Pipeline constructor; it does not require, and
does not permit, naming the estimators. Instead, their names will be set
to the lowercase of their types automatically.
Parameters
... | 13,350 |
def mock_load_json():
    """Fixture: patch the ios component's ``load_json`` to return an empty dict."""
    # Yield inside the context manager so the patch stays active for the
    # duration of the test that consumes this fixture.
    with patch("openpeerpower.components.ios.load_json", return_value={}):
        yield | 13,351 |
def say_hello(name):
    """Log that `name` entered the application and return a greeting for it.

    :param name: the client's name
    :returns: greeting string addressed to `name`
    """
    logging.info('User %s entered', name)
    greeting = 'Hello {}'.format(name)
    return greeting
def dual_edges_2(vertices):
"""
Compute the dual edge vectors of a triangle, expressed in the
triangle plane orthonormal basis.
:param vertices: The triangle vertices (3 by n matrix with the vertices as rows (where n is the dimension of the
space)).
:returns: The triangle dual edge vectors ... | 13,353 |
def test_uncompress():
    """ Test the uncompress function """
    # Given
    """ test_input and expected """
    # NOTE(review): `test_input` and `expected` are module-level fixtures
    # defined elsewhere in this file.
    # When
    result = uncompress(test_input)
    # Then
    assert result == expected | 13,354 |
def merge_dicts(dictionaries):
"""Merges multiple separate dictionaries into a single dictionary.
Parameters
----------
dictionaries : An iterable container of Python dictionaries.
Returns
-------
merged : A single dictionary that represents the result of merging the all the
... | 13,355 |
def noise_distribution_to_cost_function(
noise_distribution: Union[str, Callable]
) -> Callable[[str], str]:
"""
Parse noise distribution string to a cost function definition amici can
work with.
The noise distributions listed in the following are supported. :math:`m`
denotes the measuremen... | 13,356 |
def parc_brainmaps(gene):
"""
Generates brainplots of `gene` expression values for each manuscript
Parameters
----------
gene : str
Gene for which brainplots should be generated
"""
fig_dir = FIG_DIR / 'brainmaps'
fig_dir.mkdir(parents=True, exist_ok=True)
aparc = nib.free... | 13,357 |
def serializer(cls, o):
"""
Custom class level serializer.
"""
# You can provide a custom serialize/deserialize logic for certain types.
if cls is datetime:
return o.strftime('%d/%m/%y')
# Raise SerdeSkip to tell serde to use the default serializer/deserializer.
else:
raise ... | 13,358 |
def parse_args():
""" parse command-line arguments """
usage = """Usage: bcfg2_svnlog.py [options] -r <revision> <repos>"""
parser = OptionParser(usage=usage)
parser.add_option("-v", "--verbose", help="Be verbose", action="count")
parser.add_option("-c", "--config", help="Config file",
... | 13,359 |
def compute_com(kpt_ids, pose_keypoints):
"""Computes center of mass from available points for each pose.
Requires at least one arm (shoulder, elbow, wrist), neck and hips.
Required keypoints to return result: at least one arm with hip, neck and [nose OR ear]
:param kpt_id: IDs of keypoints in pose_ke... | 13,360 |
def test_grid_three_size_more_at_end():
"""
"""
expected = (
(0, 0), (1, 0), (2, 0), (3, 0),
(0, 1), (1, 1), (2, 1), (3, 1), (4, 1), (5, 1),
(0, 2), (1, 2), (2, 2), (3, 2), (4, 2), (5, 2), (6, 2), (7, 2),
(0, 3), (1, 3), (2, 3), (3, 3), (4, 3), (5, 3), (6, 3), (7, 3)
)
... | 13,361 |
def certificate(cert_name):
    """Return the path to the PEM file with the given name.

    The file is looked up in the 'lib' directory next to this module.
    """
    base_dir = os.path.dirname(__file__)
    return os.path.join(base_dir, 'lib', cert_name)
def _MinimumLineCount(text: str, min_line_count: int) -> str:
"""Private implementation of minimum number of lines.
Args:
text: The source to verify the line count of.
Returns:
src: The unmodified input src.
Raises:
NoCodeException: If src is less than min_line_count long.
"""
if len(text.str... | 13,363 |
def RationalQuadratic1d(
grid,
corrlen,
sigma,
alpha,
prior=None,
mu_basis=None,
mu_hyper=None,
energy=0.99
) -> Formula:
"""Rational quadratic kernel formula
"""
kernel_kwargs = {
"corrlen": corrlen,
"sigma": sigma,
"a... | 13,364 |
def pandas2csv(df, out_file, index=False, header=True):
"""Write pandas dataframe or series to CSV file;
see pandas2file for other side effects
Args:
df - pandas dataframe or series
out_file - file to which df should be written
index - write index to file?
header - write header row ... | 13,365 |
def plot_feature_importance(feature_keys, feature_importances, ax=None, **kwargs):
"""
Plot features importance after model training (typically from scikit-learn)
Parameters
----------
feature_keys: list of string
feature_importances: `numpy.ndarray`
ax: `matplotlib.pyplot.axes`
Return... | 13,366 |
def test_atomic_g_month_max_inclusive_2_nistxml_sv_iv_atomic_g_month_max_inclusive_3_3(mode, save_output, output_format):
"""
Type atomic/gMonth is restricted by facet maxInclusive with value
--04.
"""
assert_bindings(
schema="nistData/atomic/gMonth/Schema+Instance/NISTSchema-SV-IV-atomic-gM... | 13,367 |
def simulate_timestamps_till_horizon(mu, alpha, beta, Thorizon = 60, \
seed=None, node=None, output_rejected_data=False):
"""
Inputs:
mu, alpha, beta are parameters of intensity function of HP
"""
#################
# Initialisation
#################
rng = default_rng(seed) ... | 13,368 |
def jp_inference_on_dataset(model, data_loader, evaluator):
"""
Run model on the data_loader and evaluate the metrics with evaluator.
Also benchmark the inference speed of `model.forward` accurately.
The model will be used in eval mode.
Args:
model (nn.Module): a module which accepts an obj... | 13,369 |
def boolToYes(b):
    """Convert a truthy/falsy input into 'yes' or 'no'.

    Args:
        b (bool): The Boolean value to be converted
    Returns:
        str: 'yes' if b is truthy, and 'no' otherwise.
    """
    return "yes" if b else "no"
def top_symptoms(dic, title):
"""Find and plot top symptoms in the dictionary based on count
Args:
dic (dict): Dictionary containing text-count pair
Returns:
[dictionary]: Top 5 symptoms with their count
"""
assert isinstance(dic, dict) and len(dic) > 0, "dic is not a nonempty dict... | 13,371 |
def _calc_z(h: DataArray, zice: DataArray, zeta: DataArray,
s: DataArray, Cs: DataArray,
hc: float, Vtransform: int) -> DataArray:
"""
Calculate grid z-coord depth given water depth (h), iceshelf depth (zice),
sea surface (zeta), and vertical grid transformation parameters.
Inpu... | 13,372 |
def gap_loss(preds, D, A):
"""
This module implement the loss function in paper [Azada Zazi, Will Hang. et al, 2019] Nazi, Azade & Hang, Will & Goldie, Anna & Ravi, Sujith & Mirhoseini, Azalia. (2019). GAP: Generalizable Approximate Graph Partitioning Framework.
Args:
preds (tensor(float)): output ... | 13,373 |
def do_eval_standalone(args_opt):
"""
do eval standalone
"""
ckpt_file = os.path.join(args_opt.model_dir, args_opt.task_name)
ckpt_file = get_ckpt(ckpt_file)
print('ckpt file:', ckpt_file)
task = task_cfg[args_opt.task_name]
student_net_cfg.seq_length = task.seq_length
eval_cfg.batch... | 13,374 |
def test_stager(rse_factory, did_factory, root_account, replica_client):
"""
Submit a real transfer to FTS and rely on the gfal "mock" plugin to report a simulated "success"
https://gitlab.cern.ch/dmc/gfal2/-/blob/master/src/plugins/mock/README_PLUGIN_MOCK
"""
src_rse, src_rse_id = rse_factory.make_... | 13,375 |
def number_of_hole(img, hole_img, hole_counter):
""" 判斷hole的數量去執行相對應的函式
0個hole執行zero_of_hole
1個hole執行one_of_hole
2個hole執行my_text.set("Answer : 8")
大於2個hole則執行my_text.set("Error : holes number = " + str(hole_counter) + "( > 2 )")) """
switcher = {
... | 13,376 |
def get_merged_by_value_coords(spans_value, digits=None):
"""returns adjacent spans merged if they have the same value. Assumes
[(start, end, val), ..] structure and that spans_value is sorted in
ascending order.
Arguments:
- digits: if None, any data can be handled and exact values are
... | 13,377 |
def vint_mask_for_length(length):
"""
Returns the bitmask for the first byte of a variable-length integer (used for element ID and size descriptors).
:arg length: the length of the variable-length integer
:type length: int
:returns: the bitmask for the first byte of the variable-length integer
:rtype: int
... | 13,378 |
def parse(tokens):
"""
S-expr ::= ( S-expr* ) | AtomSymbol | ' S-expr
' S-expr = (quote S-expr)
"""
from itertools import tee
def _parse(tokens):
while True:
token = next(tokens)
if token == "(":
s_expr = []
while True:
... | 13,379 |
def test_bad_predict():
"""Test bad input to predict."""
secs = pysecs.SECS(sec_df_loc=[[1., 0., R_EARTH + 1e6],
[-1., 0., R_EARTH + 1e6]])
# Calling predict with the wrong shape
pred_loc = np.array([[0, 0]])
with pytest.raises(ValueError, match="Prediction locati... | 13,380 |
def preproc(raw,
dark=None,
flat=None,
solidangle=None,
polarization=None,
absorption=None,
mask=None,
dummy=None,
delta_dummy=None,
normalization_factor=1.0,
empty=None,
split_result=Fals... | 13,381 |
def lB_2_T(lB, T0=298, sigma=4E-10, ret_res=False):
"""Solves for temperature at given Bjerrum length under condition from Adhikari et al. 2019 that lB/l = 1.2 at 298 K."""
def cond(T, lB, sigma=sigma):
"""condition function whose root gives the temperature T given Bjerrum length lB."""
return l... | 13,382 |
def make_datacls(
cls_name: str,
fields: Iterable[Union[tuple[str, type], tuple[str, type, dataclasses.Field]]],
init: bool = True,
**kwargs,
) -> type:
"""
Return a new dataclass. This function wraps the Python dataclasses.make_dataclass
function, with the following changes to the generated... | 13,383 |
def square_loss(X, y, theta, reg_beta=0.0):
"""Computes squared loss and gradient.
Based on mean square margin loss.
X: (k, n) data items.
y: (k, 1) result (+1 or -1) for each data item in X.
theta: (n, 1) parameters.
reg_beta: optional regularization strength, for L2 regularization.
Retu... | 13,384 |
def test_add_node(empty_dag):
    """Test that add_node registers a node with an empty adjacency set."""
    # `empty_dag` is a fixture providing a DAG with no nodes.
    dag = empty_dag
    dag.add_node('a')
    assert dag.graph == {'a': set()} | 13,385 |
def open_mf_wrf_dataset(paths, chunks=None, compat='no_conflicts', lock=None,
preprocess=None):
"""Open multiple WRF files as a single WRF dataset.
Requires dask to be installed. Note that if your files are sliced by time,
certain diagnostic variable computed out of accumulated var... | 13,386 |
def main():
"""Plot barchart from COGclassifier count results"""
# Get argument values
args = get_args()
infile: Path = args.infile
outfile: Path = args.outfile
width: int = args.width
height: int = args.height
bar_width: int = args.bar_width
y_limit: Optional[int] = args.y_limit
... | 13,387 |
def parse_args_and_add_yaml_variables(parser: ArgumentParser,
yaml_config_file: Optional[Path] = None,
project_root: Optional[Path] = None,
fail_on_unknown_args: bool = False) -> ParserResult:
"""
R... | 13,388 |
def load_graph(model_file):
"""Loads a TensorFlow graph from file."""
graph = tf.Graph()
with graph.as_default():
od_graph_def = tf.GraphDef()
with tf.gfile.GFile(model_file, 'rb') as fid:
serialized_graph = fid.read()
od_graph_def.ParseFromString(serialized_graph)
tf.import_graph_def(o... | 13,389 |
def report_failure(result: Dict[str, Any], context: Dict[str, Any]):
    """Write out failure information from a given check in a context.

    :param result: outcome produced by a check.
    :param context: reporting context; error details from ``result`` are
        merged into it before the validation error is printed.
    """
    # Enrich the context with the error details, then render them.
    prepare_context_with_error_information(context=context, result=result)
    print_validation_error(context=context) | 13,390 |
def find_children(node, tag, xml_ns, ns_key):
"""
Finds the collection of children nodes
Parameters
----------
node : ElementTree.Element
tag : str
xml_ns : None|dict
ns_key : None|str
"""
if xml_ns is None:
return node.findall(tag)
elif ns_key is None:
retu... | 13,391 |
def normalize_string(string, ignore_spaces, ignore_punctuation):
"""Normalizes strings to prepare them for crashing comparison."""
string = string.upper()
if ignore_punctuation:
string = re.sub(r"[^1-9a-z \n\r\t]", "", string, flags=re.I)
if ignore_spaces:
string = re.sub(r"\w+", "", str... | 13,392 |
def walkfiles(thisdir):
"""
walkfiles(D) -> iterator over files in D, recursively. Yields full file paths.
Adapted from path.py by Jason Orendorff.
"""
for child in os.listdir(thisdir):
thischild = join(thisdir, child)
if isfile(thischild):
yield thischild
el... | 13,393 |
def create_sitemap(app: sphinx.application.Sphinx, exception):
"""Generates the sitemap.xml from the collected HTML page links"""
if (
not app.config["html_theme_options"].get("site_url", "")
or exception is not None
or not app.sitemap_links
):
return
filename = app.outd... | 13,394 |
def word_to_forms(word):
    """Return all possible forms for a word.

    Args:
        word (unicode)
    Returns:
        forms (set[unicode])
    """
    # Union of the forms of every lemma of the word.
    lemmas = lemmatize(word)
    return {form for lemma in lemmas for form in lemma_to_forms(lemma)}
def load_decamCorners():
"""
Returns the CCD corners of the DECam camera.
Returns:
decamCorners : *list* of *float*
A list of the angular degree offsets of the CCD corners.
"""
with open('%s/DECam_corners.dat' % data_dir) as f:
corners_dct = eval(''.join(f.readlines()))
... | 13,396 |
def reduce_expr(expr):
"""
Reduces a boolean algebraic expression based on the identity X + XY = X
Args:
expr (str): representation of the boolean algebraic expression
Returns:
A string representing the reduced algebraic expression
"""
reduced = True
... | 13,397 |
def test_spread_radix_sort():
    """
    Test spread numbers array with radix_sort.
    """
    # Mix of large 16-bit values and tiny ones exercises multiple digit passes.
    left, right = radix_sort_test([0xffff, 0xfffe, 0xfffd, 3, 2, 1])
    assert left == right | 13,398 |
def get_mask(images, b_threshold=0) :
""" Return a mask computed from baseline image
"""
b0 = baseline(images, b_threshold)
skull_mask = medipy.segmentation.skull(b0)
skull = medipy.logic.apply_mask(b0, skull_mask, 0, 0)
directory = tempfile.mkdtemp()
medipy.io.save(skull, os.... | 13,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.