content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def xmatch_arguments():
    """Return the accepted arguments of the xmatch service as a JSON response."""
    payload = {'args': args_xmatch}
    return jsonify(payload)
def rosenbrock_func(x):
"""Rosenbrock objective function.
Also known as the Rosenbrock's valley or Rosenbrock's banana
function. Has a global minimum of :code:`np.ones(dimensions)` where
:code:`dimensions` is :code:`x.shape[1]`. The search domain is
:code:`[-inf, inf]`.
Parameters
--------... | 7,401 |
def isHdf5Dataset(obj):
    """Return True when *obj* is an ``h5py.Dataset`` instance."""
    dataset_type = h5py.Dataset
    return isinstance(obj, dataset_type)
async def on_message(message):
"""
メッセージ書き込みイベント
:param message: 書き込まれたメッセージ
:type message: Message
:return: None
:rtype: None
"""
try:
if message.channel.name.upper() == consts.CMD_CHANNEL_NAME and not message.author.bot:
await cmd_manager.execute(message)
except... | 7,403 |
def writing_height(sample_wrapper, in_air):
"""
Returns writing height.
:param sample_wrapper: sample wrapper object
:type sample_wrapper: HandwritingSampleWrapper
:param in_air: in-air flag
:type in_air: bool
:return: writing height
:rtype: float
"""
# Get the on-surface/in-ai... | 7,404 |
def simplify_index_permutations(expr, permutation_operators):
"""
Performs simplification by introducing PermutationOperators where appropriate.
Schematically:
[abij] - [abji] - [baij] + [baji] -> P(ab)*P(ij)*[abij]
permutation_operators is a list of PermutationOperators to consider.
If ... | 7,405 |
def get_classification_outcomes(
confusion_matrix: pd.DataFrame,
classes: Set[Any],
class_name: str,
) -> Tuple[int, int, int, int]:
"""
Given a confusion matrix, this function counts the cases of:
- **True Positives** : classifications that accurately labeled a class
- **True Negatives** :... | 7,406 |
def update_dbt_id(
table_name: str,
id_where: sqlalchemy.Integer,
columns: typing.Dict[str, str],
) -> None:
"""Update a database row based on its id column.
Args:
table_name (str): sqlalchemy.Table name.
id_where (sqlalchemy.Integer): Content of column id.
columns (Columns)... | 7,407 |
def save_ply(filename, points, colors=None, normals=None, binary=True):
"""
save 3D/2D points to ply file
Args:
points (numpy array): (N,2or3)
colors (numpy uint8 array): (N, 3or4)
"""
assert(points.ndim == 2)
if points.shape[-1] == 2:
points = np.concatenate([points, np.... | 7,408 |
def points_in_convex_polygon_3d_jit(points,
polygon_surfaces,
):
"""check points is in 3d convex polygons.
Args:
points: [num_points, 3] array.
polygon_surfaces: [num_polygon, max_num_surfaces,
max_num_points_of... | 7,409 |
def classify(data, target_class, model_type, best_model, data_path):
"""
Classify the data using the best model.
GIVEN:
data (ist) list of events to be classified
target_class (str) one of ["tactics", "techniques"]
model_type (str) one of ["nb", "lsvc"]
best_model... | 7,410 |
def _get_spamassassin_flag_path(domain_or_user):
"""
Get the full path of the file who's existence is used as a flag to turn
SpamAssassin on.
Args:
domain_or_user - A full email address or a domain name
"""
domain = domain_or_user.lower()
user = False
if '@' in domain:
u... | 7,411 |
def list_api_keys(ctx):
"""List all api keys in db."""
show_fields = ["valid_key", "allow_fallback", "allow_locate", "allow_region"]
db = configure_db("rw")
with db_worker_session(db) as session:
columns = ApiKey.__table__.columns
fields = [getattr(columns, f) for f in show_fields]
... | 7,412 |
def format(number):
    """Reformat the passed number to the standard dashed format."""
    digits = compact(number)
    parts = (digits[:3], digits[3:-1], digits[-1])
    return '-'.join(parts)
def get_terms(request):
"""Returns list of terms matching given query"""
if TEST_MODE:
thesaurus_name = request.params.get('thesaurus_name')
extract_name = request.params.get('extract_name')
query = request.params.get('term')
else:
thesaurus_name = request.validated.get('thes... | 7,414 |
def createExportNeuroML2(netParams=None, simConfig=None, output=False, reference=None, connections=True, stimulations=True, format='xml'):
"""
Wrapper function create and export a NeuroML2 simulation
Parameters
----------
netParams : ``netParams object``
NetPyNE netParams object specifying ... | 7,415 |
def i18n_pull():
    """Fetch the updated translations from Transifex and rebuild messages."""
    commands = (
        'rm -rf rtd_tests/tests/builds/',
        'tx pull -f ',
        'django-admin makemessages --all',
        'django-admin compilemessages',
    )
    with lcd('readthedocs'):
        # Commands must run in this exact order: clean, pull, extract, compile.
        for command in commands:
            local(command)
def run_2(gosubdag):
"""Test GO colors at high and low levels of hierarchy."""
goids = [
'GO:0002682', # GO:0002682 1,127 D03 A regulation of immune system process
'GO:0002726'] # GO:0002726 2 D09 A +reg of T cell cytokine production
gosubdag.prt_goids(goids)
go2color = {
'... | 7,417 |
def load(file: str) -> pd.DataFrame:
"""Load custom file into dataframe. Currently will work with csv
Parameters
----------
file: str
Path to file
Returns
-------
pd.DataFrame:
Dataframe with custom data
"""
if not Path(file).exists():
return pd.DataFrame()... | 7,418 |
def better_get_first_model_each_manufacturer(car_db):
    """Return the first model listed for every manufacturer in *car_db*.

    Pure transformation: builds a new list and never mutates the input.
    """
    return [models[0] for models in car_db.values()]
def configurable_testcase(default_config_function):
"""Decorator to make a test case configurable."""
def internal_configurable_testcase(testcase):
_log_testcase_header(testcase.__name__, testcase.__doc__)
def wrapper_function(func, name, config, generate_default_func):
@wraps(func)
def _func... | 7,420 |
def _is_scalar(value):
"""Whether to treat a value as a scalar.
Any non-iterable, string, or 0-D array
"""
from collections import Iterable
return (getattr(value, 'ndim', None) == 0
or isinstance(value, (str, bytes))
or not isinstance(value, (Iterable,))) | 7,421 |
def start(ctx):
    """
    Start Teamplify using the environment stored on the click context.
    """
    env = ctx.obj['env']
    _start(env)
def copy_random(x, y):
    """Recover a JavaRandom generator from two observed randInt outputs.

    The recovered generator is advanced once so its next output follows
    the two observed values.
    """
    recovered_seed = find_seed(x, y)
    generator = JavaRandom(recovered_seed)
    generator.next()  # this output corresponds to y, so discard it
    return generator
def build_tree(train, max_depth, min_size, n_features):
"""build_tree(创建一个决策树)
Args:
train 训练数据集
max_depth 决策树深度不能太深,不然容易导致过拟合
min_size 叶子节点的大小
n_features 选取的特征的个数
Returns:
root 返回决策树
"""
# 返回最优列和相关的信息
root = get_sp... | 7,424 |
def test_image_download(mock_s3):
    """Ensure the image-download CLI command exits cleanly and calls S3."""
    cli_runner = CliRunner()
    invocation = cli_runner.invoke(
        voithos.cli.openstack.download_image,
        ["--image", "windows2019"],
        catch_exceptions=False,
    )
    assert invocation.exit_code == 0, invocation.output
    assert mock_s3.download.called
def delete_bucket(bucket_name: str, location: str, verbose: bool) -> bool:
"""Delete the specified S3 bucket
Args:
bucket_name (str): name of the S3 bucket
location (str): the location (region) the S3 bucket resides in
verbose (bool): enable verbose output
Returns:
bool: Tr... | 7,426 |
def generate_ordered_match_str_from_subseqs(r1,
subseqs_to_track,
rc_component_dict,
allow_overlaps=False):
"""Generates an ordered subsequences match string for the input sequence.
A... | 7,427 |
def combination(n: int, r: int) -> int:
    """Return nCr, the number of ways to choose *r* items from *n*.

    Equivalent to ``permutation(n, r) // factorial(r)`` but delegates to
    :func:`math.comb`, which computes the binomial coefficient directly
    (and returns 0 when r > n).
    """
    from math import comb  # local import keeps the module namespace unchanged
    return comb(n, r)
def _extract_operator_data(fwd, inv_prep, labels, method='dSPM'):
"""Function for extracting forward and inverse operator matrices from
the MNE-Python forward and inverse data structures, and assembling the
source identity map.
Input arguments:
================
fwd : ForwardOperator
... | 7,429 |
def create_new_containers(module, intended, facts):
"""
Create missing container to CVP Topology.
Parameters
----------
module : AnsibleModule
Object representing Ansible module structure with a CvpClient connection
intended : list
List of expected containers based on following ... | 7,430 |
def main(test_only=False):
"""
Main function
"""
if not os.path.exists('adjectives.txt') or not os.path.exists('animals.txt'):
LOGGER.critical('You need both adjectives.txt and animals.txt')
sys.exit(1)
current_threats_group_id, archived_threats_group_id = get_group_ids()
base_... | 7,431 |
def find_window_for_buffer_name(cli, buffer_name):
"""
Look for a :class:`~prompt_toolkit.layout.containers.Window` in the Layout
that contains the :class:`~prompt_toolkit.layout.controls.BufferControl`
for the given buffer and return it. If no such Window is found, return None.
"""
from prompt_... | 7,432 |
def convert_transpose(params, w_name, scope_name, inputs, layers, weights, short_names):
"""
Convert transpose layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: di... | 7,433 |
def main(mytimer: func.TimerRequest, outputblob: func.Out[bytes]):
# pylint: disable=E1136
"""Serverless scraping function."""
utc_timestamp = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
if mytimer.past_due:
logging.info("The timer is past due!")
url = "htt... | 7,434 |
def create_pilot(username='kimpilot', first_name='Kim', last_name='Pilot', email='kim@example.com', password='secret'):
"""Returns a new Pilot (User) with the given properties."""
pilot_group, _ = Group.objects.get_or_create(name='Pilots')
pilot = User.objects.create_user(username, email, password, first_na... | 7,435 |
def set_default_role():
"""Set custom default role.
By default::
`text` -> :title:`text`
we override with our role::
`text` -> `text`
See Also:
:attr:`roles.DEFAULT_INTERPRETED_ROLE`.
"""
if roles._roles.get('') != default_role:
roles._roles[''] = default_rol... | 7,436 |
def dict_to_datasets(data_list, components):
"""add models and backgrounds to datasets
Parameters
----------
datasets : `~gammapy.modeling.Datasets`
Datasets
components : dict
dict describing model components
"""
models = dict_to_models(components)
datasets = []
for... | 7,437 |
def of(*args: _TSource) -> Seq[_TSource]:
    """Build a :class:`Seq` from the given elements.

    Wrapping the arguments in ``Seq`` enables fluent dot chaining on the
    resulting sequence object.
    """
    elements = args
    return Seq(elements)
async def test_create_stop_action(
decoy: Decoy,
mock_engine_store: EngineStore,
mock_run_store: RunStore,
mock_task_runner: TaskRunner,
run_id: str,
subject: RunController,
) -> None:
"""It should resume a run."""
result = subject.create_action(
action_id="some-action-id",
... | 7,439 |
def find_anagrams(word_list: list) -> dict:
"""Finds all anagrams in a word list and returns it in a dictionary
with the letters as a key.
"""
d = dict()
for word in word_list:
unique_key = single(word)
if unique_key in d:
d[unique_key].append(word... | 7,440 |
def check_aggregator(aggregator, source, expression_type, group_by):
"""Check aggregator fields."""
if aggregator["source"] != source:
raise ValueError(
"All expressions must be annotated by the same genome database (NCBI, UCSC, ENSEMBLE,...)."
)
if aggregator["expression_type"] ... | 7,441 |
def acr_helm_install_cli(client_version='2.16.3', install_location=None, yes=False):
"""Install Helm command-line tool."""
if client_version >= '3':
logger.warning('Please note that "az acr helm" commands do not work with Helm 3, '
'but you can still push Helm chart to ACR using ... | 7,442 |
def resolver(state_sets, event_map):
"""Given a set of state return the resolved state.
Args:
state_sets(list[dict[tuple[str, str], str]]): A list of dicts from
type/state_key tuples to event_id
event_map(dict[str, FrozenEvent]): Map from event_id to event
Returns:
dict... | 7,443 |
def parse_study(study):
"""Parse study
Args:
study (object): object from DICOMDIR level 1 object (children of patient_record)
Returns:
children_object
appending_keys
"""
#study_id = study.StudyID
study_date = study.StudyDate
study_time = study.StudyTime
... | 7,444 |
def list_organizational_units_for_parent_single_page(self, **kwargs):
"""
This will continue to call list_organizational_units_for_parent until there are no more pages left to retrieve.
It will return the aggregated response in the same structure as list_organizational_units_for_parent does.
:param sel... | 7,445 |
def daemon(target, name=None, args=None, kwargs=None, after=None):
    """Create and start a daemon thread.

    Identical to ``start()`` except that ``daemon=True`` is always passed.
    """
    return start(
        target,
        name=name,
        args=args,
        kwargs=kwargs,
        daemon=True,
        after=after,
    )
def mkdirs(dpath):
"""
Create directory path (including the path of the parent directory if it
doesn't already exist)
:param dpath: string - path to directory to be created
"""
if os.path.isdir(dpath):
return
try:
os.makedirs(dpath)
except OSError as exc:
fstr =... | 7,447 |
def convert_pc2plyandweaklabels(anno_path, save_path, sub_pc_folder,
weak_label_folder, weak_label_ratio, sub_grid_size,
gt_class, gt_class2label):
"""
Convert original dataset files (consiting of rooms) to ply file and weak labels. Physically, each room will generate sev... | 7,448 |
def add_image_to_obj(obj, img, *args, **kwargs):
"""
"""
# skip everything if there is no image
if img == None:
return None
# find out of the object is an artist or an album
# then add the artist or the album to the objects
objs = {}
if isinstance(obj, Artist):
objs['art... | 7,449 |
def test_zco_x_y_invariant():
"""Make sure all vertical columns are identical"""
# Generate 2x2 flat bathymetry dataset
ds_bathy = Bathymetry(10.0e3, 1.2e3, 2, 2).flat(5.0e3)
ds_bathy.domcfg.jpk = 10
ds = ds_bathy.domcfg.zco(ppdzmin=10, pphmax=5.0e3)
# Check z3 and e3
for varname in ["z3T"... | 7,450 |
def rr_category_ad(context, ad_zone, ad_category, index=0):
"""
Returns a rr advert from the specified category based on index.
Usage:
{% load adzone_tags %}
{% rr_category_ad 'zone_slug' 'my_category_slug' 1 %}
"""
to_return = {'random_int': randint(1000000, 10000000)}
# Retrieve a r... | 7,451 |
def test_config_unset_backreferences(config_app):
"""Testing Deprecation warning message against unset backreference config
In this case the user is notified to update the set the
backreferences_dir config variable if such feature is to be enabled or
otherwise to deactivate the feature. Sphinx-Gallery ... | 7,452 |
def decoderCNN(x, layers):
""" Construct the Decoder
x : input to decoder
layers : the number of filters per layer (in encoder)
"""
# Feature unpooling by 2H x 2W
for _ in range(len(layers) - 1, 0, -1):
n_filters = layers[_]
x = Conv2DTranspose(n_filters, (3, 3), strides... | 7,453 |
def main(tempdir):
"""
Create a Flask app, and using the updated security config file get a new
REST token. Then write the new token to a file under the snapshot`s
tmp dir.
:param tempdir: The temp dir used by `restore snapshot` wf.
"""
setup_flask_app()
sm = get_storage_manager()
ad... | 7,454 |
def centroid_imzml(input_filename, output_filename, step=[], apodization=False, w_size=10, min_intensity=1e-5, prevent_duplicate_pixels=False):
# write a file to imzml format (centroided)
"""
:type input_filename string - source file path (must be .imzml)
:type output_filename string - output file path... | 7,455 |
def binary_dice_iou_score(
y_pred: torch.Tensor,
y_true: torch.Tensor,
mode="dice",
threshold=None,
nan_score_on_empty=False,
eps=1e-7,
ignore_index=None,
) -> float:
"""
Compute IoU score between two image tensors
:param y_pred: Input image tensor of any shape
:param y_true:... | 7,456 |
def test_glob_files_single_pattern(root_directory: str) -> None:
"""Test pattern matching while listing notebooks in a directory."""
data_directory = os.path.join(root_directory, "tests", "data")
nb_pattern = os.path.join("replace_images_in_markdown", "*.ipynb")
files = glob_files(data_directory, nb_pat... | 7,457 |
def get_uname_arch():
    """
    Returns arch of the current host as the kernel would interpret it
    """
    global _uname_arch  # pylint: disable=global-statement
    # Lazily detect once and memoize in the module-level ``_uname_arch``
    # so repeated calls skip re-running detection.
    if not _uname_arch:
        _uname_arch = detect_uname_arch()
    return _uname_arch
def _getSTSToken() -> Tuple[str, BosClient, str]:
"""
Get the token to upload the file
:return:
"""
if not Define.hubToken:
raise Error.ArgumentError('Please provide a valid token', ModuleErrorCode, FileErrorCode, 4)
config = _invokeBackend("circuit/genSTS", {"token": Define.hubToken}... | 7,459 |
async def get_thumb_file(mass: MusicAssistant, url, size: int = 150):
"""Get path to (resized) thumbnail image for given image url."""
assert url
cache_folder = os.path.join(mass.config.data_path, ".thumbs")
cache_id = await mass.database.get_thumbnail_id(url, size)
cache_file = os.path.join(cache_f... | 7,460 |
def load_pickle(filename):
    """Deserialize and return the object stored in a pickle file.

    :param filename: path to the pickle file
    :return: the unpickled object
    """
    # Context manager guarantees the handle is closed even if loading
    # fails; the original left the file open.
    with open(filename, 'rb') as filehandler:
        return pickle.load(filehandler)
def columnize(s, header=None, width=40):
"""Dump an object and make each line the given width
The input data will run though `json.loads` in case it is a JSON object
Args:
s (str): Data to format
header (optional[str]): Header to prepend to formatted results
width (optional[int]): ... | 7,462 |
def create_eeg_epochs(config):
"""Create the data with each subject data in a dictionary.
Parameter
----------
subject : string of subject ID e.g. 7707
trial : HighFine, HighGross, LowFine, LowGross
Returns
----------
eeg_epoch_dataset : dataset of all the subjects with different con... | 7,463 |
def restart():
"""Terminate the currently running instance of the script and start a new
one.
:command: `Reload <https://www.autohotkey.com/docs/commands/Reload.htm>`_
"""
# TODO: If the new script has an error, AHK will show it and quit. Instead,
# keep the old script running.
from . impor... | 7,464 |
def to_newick(phylo):
    """
    Serialize a phylogenetic tree to its simplified Newick code.

    :param phylo: `PhyloTree` instance.
    :return: `str` instance.
    """
    root_node = phylo_to_newick_node(phylo)
    return root_node.newick
def pipe(*functions):
"""
pipes functions one by one in the provided order
i.e. applies arg1, then arg2, then arg3, and so on
if any arg is None, just skips it
"""
return functools.reduce(
lambda f, g: lambda x: f(g(x)) if g else f(x),
functions[::-1],
lambda x: x) if fun... | 7,466 |
def remove_news_update(removed_update_name: str, expired: bool) -> None:
"""Removes any expired news articles or any articles that have been
manuallyremoved by the user.
If an update has expired, a loop is used to find the update and remove it
from the global list of updates. Otherwise, updates need to... | 7,467 |
def initialize_graph_batch(batch_size):
""" Initialize a batch of empty graphs to begin the generation process.
Args:
batch_size (int) : Batch size.
Returns:
generated_nodes (torch.Tensor) : Empty node features tensor (batch).
generated_edges (torch.Tensor) : Empty edge features tensor (... | 7,468 |
def TimestampFromTicks(ticks):
"""Construct an object holding a timestamp value from the given ticks value
(number of seconds since the epoch).
This function is part of the `DBAPI 2.0 specification
<http://www.python.org/dev/peps/pep-0249/>`_.
:rtype: :class:`datetime.datetime`
"""
return ... | 7,469 |
def extract_object_token(data, num_tokens, obj_list=[], verbose=True):
""" Builds a set that contains the object names. Filters infrequent tokens. """
token_counter = Counter()
for img in data:
for region in img['objects']:
for name in region['names']:
if not obj_li... | 7,470 |
def test_lambda_expressions():
"""Lambda 表达式"""
# 这个函数返回两个参数的和:lambda a, b: a+b
# 与嵌套函数定义一样,lambda函数可以引用包含范围内的变量。
def make_increment_function(delta):
"""本例使用 lambda 表达式返回函数"""
return lambda number: number + delta
increment_function = make_increment_function(42)
assert increm... | 7,471 |
def test_dont_merge():
"""
Configuration support disabling recursive merging
"""
config = Configuration(
nested=dict(
__merge__=False,
nested_key="nested_value",
other_key="initial_value",
)
)
config.merge(
key="value",
nested=... | 7,472 |
def _generate_with_relative_time(initial_state, condition, iterate, time_mapper) -> Observable:
"""Generates an observable sequence by iterating a state from an
initial state until the condition fails.
Example:
res = source.generate_with_relative_time(0, lambda x: True, lambda x: x + 1, lambda x: 0... | 7,473 |
def test_cache_model(app, authed_client):
"""Test that caching a model works."""
user = User.from_pk(1)
cache.cache_model(user, timeout=60)
user_data = cache.get('users_1')
assert user_data['id'] == 1
assert user_data['username'] == 'user_one'
assert user_data['enabled'] is True
assert u... | 7,474 |
def setup(bot: commands.Bot) -> None:
    """Register the owner cog with the bot."""
    cog = owner(bot)
    bot.add_cog(cog)
def is_prime(num):
    """Return True when *num* is a prime number.

    Numbers below 2 (including 0 and negatives) are not prime; the
    original returned True for 0 and raised TypeError on negative input
    because ``(-n) ** 0.5`` yields a complex number.
    """
    if num < 2:
        return False
    # Trial division up to floor(sqrt(num)) is sufficient.
    for factor in range(2, int(num ** 0.5) + 1):
        if num % factor == 0:
            return False
    return True
def ptcorr(y1, y2, dim=-1, eps=1e-8, **kwargs):
"""
Compute the correlation between two PyTorch tensors along the specified dimension(s).
Args:
y1: first PyTorch tensor
y2: second PyTorch tensor
dim: dimension(s) along which the correlation is computed. Any valid PyTor... | 7,477 |
def discretize_time_difference(
times, initial_time, frequency, integer_timestamps=False
) -> typing.Sequence[int]:
"""method that discretizes sequence of datetimes (for prediction slices)
Arguments:
times {Sequence[datetime] or Sequence[float]} -- sequence of datetime objects
... | 7,478 |
def pathlines(u_netcdf_filename,v_netcdf_filename,w_netcdf_filename,
startx,starty,startz,startt,
t,
grid_object,
t_max,delta_t,
u_netcdf_variable='UVEL',
v_netcdf_variable='VVEL',
w_netcdf_variable='WVEL',
u_gri... | 7,479 |
def firing_rate(x, theta=0.5, alpha=0.12):
""" Sigmoidal firing rate function
Parameters
----------
x : float
Mean membrane potential.
theta : float
Inflection point (mean firing activity) of sigmoidal curve (default
value 0.12)
alpha : float
Steepness of si... | 7,480 |
def plot_nn_pred_2D_extra_std(x, y_pred, y_target):
"""
x np.array(n_samples, n_tgrid, n_xgrid, dim_in)
y_pred np.array(n_samples, n_tgrid, n_xgrid, 1)
y_target np.array(n_samples, n_tgrid, n_xgrid, 1)
"""
n_samples = x.shape[0]
n_tgrid = x.shape[1]
n_xgrid = x.shape[2]
x_sa... | 7,481 |
def generator2(Trainval_GT, Trainval_N, Pos_augment, Neg_select, augment_type, pattern_type, zero_shot_type, isalign,
epoch=0):
"""
:param Trainval_GT:
:param Trainval_N:
:param Pos_augment:
:param Neg_select:
:param augment_type:
:param pattern_type:
:return:
"""
... | 7,482 |
def map_points(pois, sample_size=-1, kwd=None, show_bbox=False, tiles='OpenStreetMap', width='100%', height='100%'):
"""Returns a Folium Map displaying the provided points. Map center and zoom level are set automatically.
Args:
pois (GeoDataFrame): A GeoDataFrame containing the POIs to be displayed.
... | 7,483 |
def read_data_file():
    """
    Read and return the full contents of the file named by the
    module-level ``datafilename``.

    :return: file contents as a string
    """
    # ``with`` closes the handle even when read() raises; the original
    # leaked the handle on error.
    with open(datafilename, 'r') as datafile:
        return datafile.read()
def squared_loss(y_hat, y):
    """Squared (L2) loss: ``(y_hat - y)**2 / 2`` with *y* reshaped to match."""
    residual = y_hat - y.reshape(y_hat.shape)
    return residual ** 2 / 2
def normalize_string(string, lowercase=True, convert_arabic_numerals=True):
"""
Normalize the given string for matching.
Example::
>>> normalize_string("tétéà 14ème-XIV, foobar")
'tetea XIVeme xiv, foobar'
>>> normalize_string("tétéà 14ème-XIV, foobar", False)
'tetea 14e... | 7,486 |
def find_sub_expression(
expression: Sequence[SnailfishElement],
) -> Sequence[SnailfishElement]:
"""Finds the outermost closed sub-expression in a subsequence."""
num_open_braces = 1
pos = 0
while num_open_braces > 0:
pos += 1
if expression[pos] == "[":
num_open_braces +... | 7,487 |
def printDebug(s, style=None):
"""
=> http://click.pocoo.org/5/utils/
EG
click.secho('Hello World!', fg='green')
click.secho('Some more text', bg='blue', fg='white')
click.secho('ATTENTION', blink=True, bold=True)
"""
msg = ">>[%s]debug>>: %s" % (strftime("%H:%M:%S"), s)
try:
if style == "comm... | 7,488 |
def set_secondary(typedef, fileobj, discovered):
"""
Pull over missing secondaryFiles to the job object entry.
Adapted from:
https://github.com/curoverse/arvados/blob/2b0b06579199967eca3d44d955ad64195d2db3c3/sdk/cwl/arvados_cwl/runner.py#L67
"""
if isinstance(fileobj, MutableMapping) and fileob... | 7,489 |
def run_tha_test(manifest, cache_dir, remote, max_cache_size, min_free_space):
"""Downloads the dependencies in the cache, hardlinks them into a temporary
directory and runs the executable.
"""
cache = Cache(cache_dir, remote, max_cache_size, min_free_space)
outdir = tempfile.mkdtemp(prefix='run_tha_test')
... | 7,490 |
def _add_baseline(baseline_results, counts, doc, correct_ents, kb):
"""
Measure 3 performance baselines: random selection, prior probabilities, and 'oracle' prediction for upper bound.
Only evaluate entities that overlap between gold and NER, to isolate the performance of the NEL.
"""
for ent in doc... | 7,491 |
def get_rec_attr(obj, attrstr):
"""Get attributes and do so recursively if needed"""
if attrstr is None:
return None
if "." in attrstr:
attrs = attrstr.split('.', maxsplit=1)
if hasattr(obj, attrs[0]):
obj = get_rec_attr(getattr(obj, attrs[0]), attrs[1])
else:
... | 7,492 |
def _get_message_mapping(types: dict) -> dict:
"""
Return a mapping with the type as key, and the index number.
:param types: a dictionary of types with the type name, and the message type
:type types: dict
:return: message mapping
:rtype: dict
"""
message_mapping = {}
entry_index = ... | 7,493 |
def _get_default_config_files_location():
"""Get the locations of the standard configuration files. These are
Unix/Linux:
1. `/etc/pywps.cfg`
2. `$HOME/.pywps.cfg`
Windows:
1. `pywps\\etc\\default.cfg`
Both:
1. `$PYWPS_CFG environment variable`
:returns: configuratio... | 7,494 |
def cd(path):
"""
Change location to the provided path.
:param path: wlst directory to which to change location
:return: cmo object reference of the new location
:raises: PyWLSTException: if a WLST error occurs
"""
_method_name = 'cd'
_logger.finest('WLSDPLY-00001', path, class_name=_c... | 7,495 |
def plt_roc_curve(y_true, y_pred, classes, writer, total_iters):
"""
:param y_true:[[1,0,0,0,0], [0,1,0,0], [1,0,0,0,0],...]
:param y_pred: [0.34,0.2,0.1] , 0.2,...]
:param classes:5
:return:
"""
fpr = {}
tpr = {}
roc_auc = {}
roc_auc_res = []
n_classes = len(cla... | 7,496 |
def get_deps(sentence_idx: int, graph: DependencyGraph):
"""Get the indices of the dependants of the word at index sentence_idx
from the provided DependencyGraph"""
return list(chain(*graph.nodes[sentence_idx]['deps'].values())) | 7,497 |
def incr(func):
"""
Increment counter
"""
@functools.wraps(func)
def wrapper(self):
# Strip off the "test_" from the function name
name = func.__name__[5:]
def _incr(counter, num):
salt.utils.process.appendproctitle("test_{}".format(name))
for _ in r... | 7,498 |
def IgnoreSigint():
    """Make the process ignore any SIGINT delivered from now on."""
    ignore_handler = signal.SIG_IGN
    signal.signal(signal.SIGINT, ignore_handler)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.