content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def _get_tests(tier):
    """Look up the tests registered for *tier*.

    NOTE(review): returns whatever TEST_TIERS stores for the tier —
    presumably an iterable of test functions; confirm against TEST_TIERS.
    """
    return TEST_TIERS[tier]
def get_first_of_iterable(iterable):
"""
Return the first element of the given sequence.
Most useful on generator types.
:param iterable iterable: input iterable
:returns: tuple(iterable, first_element). If a generator is passed,
a new generator will be returned preserving the original val... | 15,201 |
def getTiers(connection=None):
"""
"""
# Open the master database if it is not supplied.
flag = False
if connection is None:
connection = sqlite3.connect(MASTER)
flag = True
# Create a cursor from the connection.
cursor = connection.cursor()
# Execute the statement to ... | 15,202 |
def _on_execute_sync(ctx: ExecutionContext, step: WorkflowStep):
"""Performs synchronous step work.
"""
# If unit of work is complete then signal step end.
if step.result is None:
do_step_verification.send(ctx)
# Enqueue message batch (with completion callback).
elif isinstance... | 15,203 |
def get_repr_type(type_: Any) -> Any:
"""Parse a type and return an representative type.
Example:
All of the following expressions will be ``True``::
get_repr_type(A) == A
get_repr_type(Annotated[A, ...]) == A
get_repr_type(Union[A, B, ...]) == A
get_rep... | 15,204 |
def get_month_range_from_dict(source):
"""
:param source: dictionary with keys 'start' and 'end
:return: a tuple of datatime objects in the form (start, end)
"""
now = timezone.now()
start = source.get('start')
end = source.get('end', datetime.datetime(now.year, now.month, calendar.monthran... | 15,205 |
def softmax_ad_set_dim_func(head, data, axis):
"""Look up the softmax_ad_set_dim_map, and return hash_value, hash_key."""
key = []
key.append(tuple(data.shape))
key.append(data.dtype)
key.append(axis)
hash_key = str(tuple(key))
if hash_key in softmax_ad_set_dim_map.keys():
return ct... | 15,206 |
def parse_latency_stats(fp):
"""
Parse latency statistics.
:param fp: the file path that stores the statistics
:returns an average latency in milliseconds to connect a pair of initiator and responder clients
"""
latency = []
with open(fp) as csvfile:
csvreader = csv.DictReader(csvfil... | 15,207 |
def get_tweets(input, out_dir, ext):
"""
This function takes the list of individuals with the periods list and runs twint for each period. It stores the result in a csv file called c.Output and returns the dictionary of uncollected names and periods.
"""
counter = 0
uncollected = {}
total_uncoll... | 15,208 |
def bootstrap_dev_server_acls():
"""Adds default pools.cfg."""
assert utils.is_local_dev_server()
global _LOCAL_FAKE_CONFIG
_LOCAL_FAKE_CONFIG = _PoolsCfg(
{
'default': PoolConfig(
name='default',
rev='pools_cfg_rev',
scheduling_users=frozenset([
a... | 15,209 |
def test_plugin_telegram_general(mock_post, mock_get):
"""
NotifyTelegram() General Tests
"""
# Disable Throttling to speed testing
plugins.NotifyBase.request_rate_per_sec = 0
# Bot Token
bot_token = '123456789:abcdefg_hijklmnop'
invalid_bot_token = 'abcd:123'
# Chat ID
chat_... | 15,210 |
def micro_jaccard(y_true, y_pred):
"""
Calculate the micro Jaccard-score, i.e. TP / (TP + FP + FN).
:param y_true: `numpy.array` of shape `(n_samples,)` or `(n_samples, n_classes)`. True labels or class assignments.
:param y_pred: `numpy.array` of shape `(n_samples,)` or `(n_samples, n_classes)`. Predi... | 15,211 |
def round_to_nreads(number_set, n_reads, digit_after_decimal=0):
"""
This function take a list of number and return a list of percentage, which represents the portion of each number in sum of all numbers
Moreover, those percentages are adding up to 100%!!!
Notice: the algorithm we are using here is 'Lar... | 15,212 |
def _get_raster_extent(src):
"""
extract projected extent from a raster dataset
(min_x, max_x, min_y, max_y)
Parameters
----------
src : gdal raster
Returns
-------
(min_x, max_x, min_y, max_y)
"""
ulx, xres, xskew, uly, yskew, yres = src.GetGeoTransform()
lrx = ulx + (... | 15,213 |
def joined_table_table_join_args(joined_table: SQLParser.JoinedTableContext) -> dict:
"""
Resolve a joinedTable ParseTree node into relevant keyword arguments for TableJoin.
These will be pushed down and applied to the child TableRef.
"""
assert isinstance(joined_table, SQLParser.JoinedTableContext)... | 15,214 |
def find_by_name(name):
    """
    Find and return a format by name.

    :param name: A string describing the name of the format.
    :raises UnknownFormat: if no registered format carries *name*.
    """
    # Loop variable renamed from ``format`` to avoid shadowing the builtin.
    for fmt in FORMATS:
        if name == fmt.name:
            return fmt
    raise UnknownFormat('No format found with name "%s"' % name)
def test_restart(throttle_obj, profiler):
"""
Tests the behavior of a single Throttle instance iterating during two
periods of time separate separated by a short sleep. After this break,
the Throttle is restarted to check that the behavior is the same in
the two periods.
"""
ticks = -1
w... | 15,216 |
def test_endpoints(host, port, use_ssl, endpoints):
"""
Test each endpoint with its associated method and compile lists of endpoints that
can and cannot be accessed without prior authentication
"""
conn = get_conn(host, port, use_ssl)
if not conn:
sys.exit("Failed to connect to host {}, ... | 15,217 |
def create_neighborhood_polygons(gdf):
""" an attempt to muild neighborhoods polygons from asset points"""
import numpy as np
gdf = gdf.reset_index()
neis = gdf['Neighborhood'].unique()
gdf['neighborhood_shape'] = gdf.geometry
# Must be a geodataframe:
for nei in neis:
gdf1 = gdf[gdf... | 15,218 |
def custom_gradient(f=None):
"""Decorator to define a function with a custom gradient.
This decorator allows fine grained control over the gradients of a sequence
for operations. This may be useful for multiple reasons, including providing
a more efficient or numerically stable gradient for a sequence of oper... | 15,219 |
def Field(name,
ctype,
field_loader=FieldLoaderMethod.OPTIONAL,
comment=None,
gen_setters_and_getters=True):
"""Make a field to put in a node class.
Args:
name: field name
ctype: c++ type for this field
Should be a ScalarType like an int, string or enum ty... | 15,220 |
def sql(dataframe: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFrame, List[str], str]:
"""Infer best fit data types using dataframe values. May be an object converted to a better type,
or numeric values downcasted to a smallter data type.
Parameters
----------
dataframe (pandas.DataFrame) : contain... | 15,221 |
def sync_cmdb(api):
"""
将ebs信息入库
:return:
"""
ebs_list = api.main()
with DBContext('w') as session:
# 清除数据库数据
try:
session.query(DB).delete()
session.commit()
except:
session.rollback()
# 写入新数据
for rds in ebs_list:
... | 15,222 |
def calculateDeviation(img, lineLeft,lineRight, ):
"""This function calculates
the deviation of the vehicle from the center of the
image
"""
frameCenter = np.mean([lineLeft.bestx,lineRight.bestx] , dtype=np.int32)
imgCenter = img.shape[1]//2
dev = frameCenter - imgCenter
xm_p... | 15,223 |
def post_new_tracker_story(message, project_id, user):
"""Posts message contents as a story to the bound project."""
if ";" in message:
name, description = message.split(";", maxsplit=1)
else:
name, description = (message, "")
story_name = "{name} (from {user})".format(
name=name... | 15,224 |
def nz2epsmu(N, Z):
    """Convert refractive index *N* and impedance *Z* into the pair
    (effective permittivity, effective permeability)."""
    permittivity = N / Z
    permeability = N * Z
    return permittivity, permeability
def logdet_symm(m, check_symm=False):
"""
Return log(det(m)) asserting positive definiteness of m.
Parameters
----------
m : array-like
2d array that is positive-definite (and symmetric)
Returns
-------
logdet : float
The log-determinant of m.
"""
if check_symm:... | 15,226 |
def merge(left, right):
""" Merge helper
Complexity: O(n)
"""
arr = []
left_cursor, right_cursor = 0, 0
while left_cursor < len(left) and right_cursor < len(right):
# Sort each one and place into the result
if left[left_cursor] <= right[right_cursor]:
arr.append(l... | 15,227 |
def validate_form_data(FORM_Class):
"""
Validates the passed form/json data to a request and passes the
form to the called function.
If form data is not valid, return a 406 response.
"""
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
form = FOR... | 15,228 |
def set_axis_tick_format(
ax, xtickformat=None, ytickformat=None, xrotation=0, yrotation=0
):
"""Sets the formats for the ticks of a single axis
:param ax: axis object
:param xtickformat: optional string for the format of the x ticks
:param ytickformat: optional string for the format of the y ticks... | 15,229 |
def two_body(y, t):
"""
Solves the two body problem
:param y: state vector
y = [rx,ry,rz,vx,vy,vz]
:param t: time
:return: dy
"""
rx, ry, rz = y[0], y[1], y[2]
vx, vy, vz = y[3], y[4], y[5]
r = np.array([rx, ry, rz])
v = np.array([vx, vy, vz])
r_mag = np.linalg.norm... | 15,230 |
def args_parse_params(params):
""" create simple arg parser with default values (input, output)
:param dict dict_params:
:return obj: object argparse<...>
"""
parser = argparse.ArgumentParser()
parser.add_argument(
'-i',
'--path_in',
type=str,
required=True,
... | 15,231 |
def DD_carrier_sync(z,M,BnTs,zeta=0.707,type=0):
"""
z_prime,a_hat,e_phi = DD_carrier_sync(z,M,BnTs,zeta=0.707,type=0)
Decision directed carrier phase tracking
z = complex baseband PSK signal at one sample per symbol
M = The PSK modulation order, i.e., 2, 8, or 8.
BnTs = t... | 15,232 |
def rubrik_gps_vm_snapshot_create(client: PolarisClient, args: Dict[str, Any]) -> CommandResults:
"""
Trigger an on-demand vm snapshot.
:type client: ``PolarisClient``
:param client: Rubrik Polaris client to use
:type args: ``dict``
:param args: arguments obtained from demisto.args()
:ret... | 15,233 |
def getFullCorpus(emotion, speakerID = None):
"""
Return the 6 speakers files in a massive vstack
:param emotion:
:param speakerID:
:return:
"""
if emotion not in emotions or (speakerID is not None and speakerID not in speakers):
raise Exception("No Such speaker: {} or emotion: {}".... | 15,234 |
def list_public(config):
"""List public datasets."""
data = [
["ID", "STATUS", "NAME", "SIZE"],
["-" * 80, "-" * 80, "-" * 80, "-" * 80],
]
datasets = config.trainml.run(config.trainml.client.datasets.list_public())
for dset in datasets:
data.append(
[
... | 15,235 |
def CT_freezing_first_derivatives(SA, p, saturation_fraction):
"""
Calculates the first derivatives of the Conservative Temperature at
which seawater freezes, with respect to Absolute Salinity SA and
pressure P (in Pa).
Parameters
----------
SA : array-like
Absolute Salinity, g/kg
... | 15,236 |
def run(gParameters):
"""
Runs the model using the specified set of parameters
Args:
gParameters: a python dictionary containing the parameters (e.g. epoch)
to run the model with.
"""
#
if 'dense' in gParameters:
dval = gParameters['dense']
if type(dval) != list:
... | 15,237 |
def get_territory_center(territory: inkex.Group) -> inkex.Vector2d:
"""
Get the name of the territory from its child title element. If no title, returns
Warzone.UNNAMED_TERRITORY_NAME
:param territory:
:return:
territory name
"""
center_rectangle: inkex.Rectangle = territory.find(f"./{Sv... | 15,238 |
def batch_norm_relu(inputs, is_training, data_format):
"""Performs a batch normalization followed by a ReLU."""
# We set fused=True for a significant performance boost. See
# https://www.tensorflow.org/performance/performance_guide#common_fused_ops
inputs = tf.layers.batch_normalization(
inputs=input... | 15,239 |
def file_opener(
fname: str,
cache: Optional[CacheFSSpecTarget] = None,
copy_to_local: bool = False,
bypass_open: bool = False,
secrets: Optional[dict] = None,
**open_kwargs,
) -> Iterator[Union[OpenFileType, str]]:
"""
Context manager for opening files.
:param fname: The filename /... | 15,240 |
def function_handler(event, context):
"""
Shows how to access local resources in an AWS Lambda function.
Gets volume information for the local file system and publishes it.
Writes a file named 'test' and then reads the file and publishes its contents.
"""
iot_client.publish(topic='LRA/test', pay... | 15,241 |
def create_success_status(found_issue):
"""Create a success status for when an issue number was found in the title."""
issue_number = found_issue.group("issue")
url = f"https://bugs.python.org/issue{issue_number}"
return util.create_status(STATUS_CONTEXT, util.StatusState.SUCCESS,
... | 15,242 |
def check(args: 'Namespace'):
    """Validate the jina setup (config, settings, imports, network, ...)
    by running jina's ImportChecker on *args*."""
    from jina.checker import ImportChecker

    ImportChecker(args)
def stop_compose():
"""
停止compose服务
:return:
"""
home_path = os.path.abspath(os.path.join(os.getcwd(), "..")) + '/loonflow_shutongflow'
cmd_str = 'cd {}&&docker-compose stop'.format(home_path)
flag, result = run_cmd(cmd_str)
if flag:
print('-' * 30)
print('停止服务成功')
el... | 15,244 |
def score_to_rating_string(score):
    """Map a numeric *score* to its textual rating label."""
    # Upper-bound thresholds, checked in ascending order; anything at or
    # above 4 falls through to "Excellent".
    bands = ((1, "Terrible"), (2, "Bad"), (3, "OK"), (4, "Good"))
    for upper, label in bands:
        if score < upper:
            return label
    return "Excellent"
def deconv1d_df(t, observed_counts, one_sided_prf, background_count_rate, column_name='deconv', same_time=True,
deconv_func=emcee_deconvolve, **kwargs):
"""
deconvolve and then return results in a pandas.DataFrame
"""
#print("working on chunk with length {}".format(len(observed_counts)))
... | 15,246 |
def create_parser() -> ArgumentParser:
"""Create a parser instance able to parse args of script.
return:
Returns the parser instance
"""
parser = ArgumentParser()
version = get_distribution('hexlet-code').version
parser.add_argument('first_file', help='path to JSON or YAML file')
pa... | 15,247 |
def _create_fake_users(usernames):
    """Create one fake, active User per entry in *usernames*.

    Each entry is used as the email; the username is the same string
    truncated to 30 characters.
    """
    for email in usernames:
        User.objects.create(
            username=email[:30],  # username column is capped at 30 chars
            password='fakepassword',
            email=email,
            is_active=True,
        )
def show_mirror(args):
    """
    Show a port mirror session.

    NOTE(review): the previous docstring said "Add port mirror session",
    which contradicts the function name; presumably this displays the
    session named by ``args.session`` — confirm against ``session()``.
    """
    session(args.session)
def parse_alignment_file(file_path):
"""Parse the buildAlignment.tsv output file from CreateHdpTrainingData
:param file_path: path to alignment file
:return: panda DataFrame with column names "kmer", "strand", "level_mean", "prob"
"""
assert os.path.exists(file_path), "File path does not exist: {}"... | 15,250 |
def remap_key(ctx, origin_key, destination_key, *, mode=None, level=None):
"""Remap *origin_key* to *destination_key*.
Returns an instance of :class:`RemappedKey`.
For valid keys refer to `List of Keys
<https://www.autohotkey.com/docs/KeyList.htm>`_.
The optional keyword-only *mode* and *level* a... | 15,251 |
def get_anime_list(wf):
"""Get an Animelist instance.
:param Workflow3 wf: the Workflow3 object
:returns: Animelist object
:rtype: Animelist
"""
try:
animelist = Animelist(
wf.settings['UID'], wf.get_password('bangumi-auth-token')
)
except Exception as e:
... | 15,252 |
def __saveMapping():
    """Persist the in-memory lemma-mapping caches to disc, one pickle
    file per database name, under the CACHE directory."""
    for dbName, mapping in lemma_mappings.items():
        if mapping is not None:  # ``is not None`` instead of ``not ... is None``
            os.makedirs(CACHE, exist_ok=True)
            # Use a context manager so the file handle is closed
            # deterministically instead of relying on GC.
            with open(os.path.join(CACHE, dbName), 'wb') as fh:
                pickle.dump(mapping, fh)
def use_linear_strategy():
"""
Uses a linear function to generate target velocities.
"""
max_velocity = kmph2mps(rospy.get_param("~velocity", 40))
stop_line_buffer = 2.0
def linear_strategy(distances_to_waypoints, current_velocity):
# Target velocity function should be a line
# ... | 15,254 |
def recomputation_checkpoint(module: nn.Module):
"""Annotates the output of a module to be checkpointed instead of
recomputed"""
def recompute_outputs(module, inputs, outputs):
return tuple(poptorch.recomputationCheckpoint(y) for y in outputs)
return module.register_forward_hook(recompute_outp... | 15,255 |
def get_random_quote() -> str:
"""Retrieve a random quote from the Forismatic API.
Returns:
str: The retrieved quote
"""
quote = ""
while quote == "":
response = requests.get(
"http://api.forismatic.com/api/1.0/?method=getQuote&lang=en&format=json"
)
if r... | 15,256 |
def main():
"""
Implements the first step of the experiment pipeline. Creates a series of \
X_train, X_test pairs (based on different features parameters \
combinations) for each one of the folds.
Returns:
None
"""
# Construct argument parser and parse argument
ap = argparse.Arg... | 15,257 |
def has_datapoint(fake_services, metric_name=None, dimensions=None, value=None, metric_type=None, count=1):
"""
Returns True if there is a datapoint seen in the fake_services backend that
has the given attributes. If a property is not specified it will not be
considered. Dimensions, if provided, will ... | 15,258 |
def optimizeMemoryUsage(foregroundTasks, backgroundTasks, K):
"""
:type foregroundTasks: List[int]
:type backgroundTasks: List[int]
:type K: int
:rtype: List[List[int]]
"""
res = []
curr_max = 0
if len(foregroundTasks) == 0:
for j in range(len(backgroundTasks)):
... | 15,259 |
def test_empty_package_method_name():
    """fully_qualified_service works when there's no package."""
    method_name = MethodName("", "SearchService", "Search")
    expected = "SearchService"
    assert method_name.fully_qualified_service == expected
def fetch_commits():
"""Yields batches of commits from the DB."""
count_cursor = DB.cursor()
count_cursor.execute(COUNT_SQL)
count = count_cursor.fetchone()['MAX(order_id)']
read_cursor = DB.cursor()
for start in range(0, count + BATCH_SIZE, BATCH_SIZE):
print 'Starting at', start
read_cursor.execu... | 15,261 |
def cv_indices(num_folds,num_samples):
"""
Given number of samples and num_folds automatically create a subjectwise cross validator
Assumption: per subject we have 340 samples of data
>>> cv_set = cv_indices(2,680)
>>> cv_set
>>> (([0:340],[340:680]),([340:680,0:340]))
Algo:
1.Co... | 15,262 |
def load_schema(schema_name: str) -> dict:
"""Load a JSON schema.
This function searches within apollon's own schema repository.
If a schema is found it is additionally validated agains Draft 7.
Args:
schema_name: Name of schema. Must be file name without extension.
Returns:
Sche... | 15,263 |
def dataset_ls(prj, dataset):
""" List the contents of a dataset.
"""
validate_name(dataset, "dataset")
client = boto3.client("s3")
prefix = prj.s3.path("/datasets/{}/".format(dataset))
len_prefix = len(prefix)
response = client.list_objects_v2(Bucket=prj.s3.bucket(), Prefix=prefix)
for ... | 15,264 |
def shorten_sequence(base_sequence, seq_to_fitness, program):
"""Tries to shorten this sequence by omitting flag by flag and checking if
the smaller sequence has at least the same fitness value as the
original one.
"""
key_base_sequence = str(base_sequence)
sequences = set()
current_... | 15,265 |
def create_flatmap_from_dm_command(dm_command_path, output_path, file_name=None, dm_num=1):
"""
Converts a dm_command_2d.fits to the format used for the flatmap, and outputs a new flatmap fits file.
:param dm_command_path: Full path to the dm_command_2d.fits file.
:param output_path: Path to output the ... | 15,266 |
def append_lightcone_id(block_num, step_num, tbl):
"""Assigns a unique ID to each row in the astropy table, with block
and step embedded into the id. The id will read as
XXXYYYZZZZZZZZZZ (decimal), were xxx is the block number, yyy is
the step number and zzzzzzzzzz is unique id for this block/step
c... | 15,267 |
def barGraph(data, ylabel='', title='', xticklabels=None):
"""
Displays all of the data points in data as a series of bars.
Optionally a user can provide a label for the y-axis, a title, and
tick labels for the bars.
"""
N = len(data) # Number of data points
width = 0.50 # the width of the bars
offset ... | 15,268 |
def delete_group_from_ldap(group, node=None, exitcode=0):
"""Delete group entry from LDAP.
"""
if node is None:
node = current().context.ldap_node
with By(f"deleting group {group['dn']}"):
r = node.command(
f"ldapdelete -x -H ldap://localhost -D \"cn=admin,dc=company,dc=com\... | 15,269 |
def question_12(data):
"""
Question 12 linear transform the data, plot it, and show the newly created cov matrix.
:param data: data
:return: data after linear transformation
"""
s_mat = np.array([[0.1, 0, 0], [0, 0.5, 0], [0, 0, 2]])
new_data = np.matmul(s_mat, data)
plot_3d(new_data, "Q... | 15,270 |
def update_storage(user_choice):
"""It updates the Coffee Machine resources after a beverage is ordered."""
resources["water"] = resources["water"] - MENU[user_choice]["ingredients"]["water"]
resources["milk"] -= MENU[user_choice]["ingredients"]["milk"]
resources["coffee"] -= MENU[user_choice]["ingredie... | 15,271 |
def process_callback(callback):
""" Process a callback """
global total
# Read variables
total += 1
# args = callback['args']
body_data = callback['body']['data']
# body_size = callback['body']['size']
date = callback['date']
headers = callback['headers']
id_ = callback['id']
... | 15,272 |
def second_test_function(dataset_and_processing_pks):
"""
Pass a result of JSON processing to a function that saves result on a model.
:param dataset_and_processing_pks: tuple of two (Dataset PK, Processing PK)
:return: tuple of two (Dataset PK; JSON (Python's list of dicts))
"""
# unpack tuple... | 15,273 |
def prepend_python_path(path: str) -> Iterator[None]:
    """Temporarily make *path* importable by prepending it to ``sys.path``.

    Intended for use as a context manager while importing modules
    from within the repo.
    """
    try:
        # Entering the with-block: highest-priority import location.
        sys.path.insert(0, path)
        yield
    finally:
        # Leaving the with-block: always undo the sys.path mutation.
        sys.path.remove(path)
def matplotlib_axes_from_gridspec_array(arr, figsize=None):
"""Returned axes layed out as indicated in the array
Example:
--------
>>> # Returns 3 axes layed out as indicated by the array
>>> fig, axes = matplotlib_axes_from_gridspec_array([
>>> [1, 1, 3],
>>> [2, 2, 3],
>>> ... | 15,275 |
def test_return_stmt():
    """Test that the return statement functions correctly."""
    declaration = emptyfn("return 1;")
    compile(declaration)
def booleanGenerator():
    """
    Creates a generator which yields only True and False, forever.
    """
    gen = valueFromSetGenerator([True, False])
    while True:
        # next(gen) instead of gen.next(): works on Python 2 and 3,
        # whereas the .next() method is Python-2-only.
        yield next(gen)
def _fetch_gene_annotation(gene, gtf):
"""
Fetch gene annotation (feature boundaries) and the corresponding sequences.
Parameters:
-----------
gene
gene name that should be found in the "gene_name" column of the GTF DataFrame.
type: str
gtf
GTF annotation D... | 15,278 |
def deploy_static():
"""
Deploy static (application) versioned media
"""
if not env.STATIC_URL or 'http://' in env.STATIC_URL: return
from django.core.servers.basehttp import AdminMediaHandler
remote_dir = '/'.join([deployment_root(),'env',env.project_fullname,'static'])
m_prefix = len(... | 15,279 |
def T_SFLU_DRFPMI_show_full(dprint, tpath_join, fpath_join):
"""
Show a graph reduction using networkx+tikz
"""
sflu = SFLU.SFLU(
DRFPMI_edges,
graph=True,
)
# match=False allows a reduced input/output set
sflu.graph_nodes_pos(DRFPMI_locs, match=True)
#sflu.graph_nodes_po... | 15,280 |
def randomPolicy(Ts):
    """Uniform random policy: every action is equally likely.

    Returns the (dim x numActions) uniform policy matrix and the
    transition model averaged over all actions.
    """
    num_actions = len(Ts)
    num_states = len(Ts[0])
    uniform_policy = ones((num_states, num_actions)) / float(num_actions)
    avg_model = mean(array(Ts), axis=0)
    return uniform_policy, avg_model
def calc_TiTiO2(P, T):
"""
Titanium-Titanium Oxide (Ti-TiO2)
================================
Define TiTiO2 buffer value at 1 bar
Parameters
----------
P: float
Pressure in GPa
T: float or numpy array
Temperature in degrees K
Returns
-------
float or numpy array
log_fO2
References
----------
Bar... | 15,282 |
def nlevenshtein_scoredistance(first_data, memento_data):
    """Score the similarity of `first_data` and `memento_data` using the
    Normalized Levenshtein Distance measure."""
    return compute_scores_on_distance_measure(
        first_data, memento_data, distance.nlevenshtein)
def parameter_from_numpy(model, name, array):
""" Create parameter with its value initialized according to a numpy tensor
Parameters
----------
name : str
parameter name
array : np.ndarray
initiation value
Returns
-------
mxnet.gluon.parameter
a parameter object... | 15,284 |
def test_get_filetype_with_unsupported_filetype_raises_exception():
"""Test all the unsupported file types, where file type is inferred via file extension."""
unsupported_filetype = "sample.inexistent"
with pytest.raises(ValueError) as exc_info:
get_filetype(unsupported_filetype)
expected_msg = ... | 15,285 |
def execute_workflow_command():
    """Command that executes a workflow."""
    # Build the command pipeline step by step instead of one long chain.
    cmd = Command().command(_execute_workflow)
    cmd = cmd.require_migration().require_clean()
    return cmd.with_database(write=True).with_commit()
def fast_parse(python_class, parse_function, data_to_parse, number_of_workers=4, **kwargs):
"""
Util function to split any data set to the number of workers,
Then return results using any give parsing function
Note that when using dicts the Index of the Key will be passed to the function
... | 15,287 |
def create_plate(dim=DIMENSION, initial_position=-1):
"""
Returns a newly created plate which is a matrix of dictionnaries (a matrix of cells) and places the first crystal cell in it at the inital_pos
The keys in a dictionnary represent the properties of the cell
:Keys of the dictionnary:
-... | 15,288 |
def CanonicalizeName(raw_name: Text):
"""Strips away all non-alphanumeric characters and converts to lowercase."""
unicode_norm = unicodedata.normalize('NFKC', raw_name).lower()
# We only match Ll (lowercase letters) since alphanumeric filtering is done
# after converting to lowercase. Nl and Nd are numeric-lik... | 15,289 |
def data_v1( request ):
""" Handles all /v1/ urls. """
( service_response, rq_now, rq_url ) = ( {}, datetime.datetime.now(), common.make_request_url(request) ) # initialization
dump_param_handler = views_helper.DumpParamHandler( rq_now, rq_url )
if request.GET.get( 'data', '' ) == 'dump':
retur... | 15,290 |
def ssh(server, cmd, checked=True):
    """Run *cmd* on a remote machine over ssh.

    :param server: ssh target (e.g. ``host`` or ``user@host``)
    :param cmd: command line to execute on the remote host
    :param checked: if True, raise ``CalledProcessError`` on a non-zero
        exit; otherwise return the exit code.
    """
    # Pass argv as a list (no shell=True) so a cmd containing quotes or
    # shell metacharacters cannot break out of the local command line;
    # ssh still hands cmd to the remote shell as a single argument,
    # matching the old 'ssh server "cmd"' behaviour.
    argv = ['ssh', server, cmd]
    runner = subprocess.check_call if checked else subprocess.call
    return runner(argv, stdout=sys.stdout)
def permutations(n, r=None):
"""Returns the number of ways of arranging r elements of a set of size n in
a given order - the number of permuatations.
:param int n: The size of the set containing the elements.
:param int r: The number of elements to arange. If not given, it will be\
assumed to be eq... | 15,292 |
def backtrace_warn(msg, back_trace_len=0):
"""
warning msg with backtrace support
"""
try:
msg = _log_file_func_info(msg, back_trace_len)
logging_instance = _LoggerInstance()
logging_instance.get_logger().warn(msg)
except u_exception.LoggerException:
return
except... | 15,293 |
def create_out_dir(dir_path):
    """Create directory *dir_path* (including parents) if it does not exist."""
    # exist_ok avoids the check-then-create race of the previous
    # ``if not os.path.exists(...)`` guard under concurrent callers.
    os.makedirs(dir_path, exist_ok=True)
def cyk(word: str, cfg: CFG) -> bool:
"""
Checks whether grammar derive the word.
This function is applicable to any CFG.
Parameters
----------
word: str
A word to derive in cfg
cfg: CFG
A CFG to derive a word
Returns
-------
bool:
Whether grammar deriv... | 15,295 |
def all_dynamic_dt_needed_paths(f, paths):
""" Return a dictionary of all the DT_NEEDED => Library Paths for
a given ELF file obtained by recursively following linkage.
"""
with open(f, 'rb') as file:
try:
readelf = ReadElf(file)
eclass = readelf.elf_class()
... | 15,296 |
def macd(df, ewa_short, ewa_long, ewa_signal, price_col="adj_close"):
"""Moving Average Convergence Divergence
Parameters:
-----------
df : DataFrame
Input dataframe.
ewa_short : int
Exponentially weighted average time-window for a short time-span.
A common choice for the sho... | 15,297 |
def make_annotation(field: ModelField):
"""
Convert a field annotation type to form data accepted type.
The method convert structural field such as `BaseModel` and `Dict` to a str. Such as the model's value is
supplied as a serialized JSON string format. Such string will be converted back to a dictiona... | 15,298 |
def rawfile_to_h5_external_dataset(bin_file, output_url, shape, dtype,
overwrite=False):
"""
Create a HDF5 dataset at `output_url` pointing to the given vol_file.
Either `shape` or `info_file` must be provided.
:param str bin_file: Path to the .vol file
:param Da... | 15,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.