content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def flatten_mock_calls(mock):
"""
Flatten the calls performed on a particular mock object,
into a list of calls with arguments.
"""
result = []
for call in mock.mock_calls:
call = list(call)
call_name = call[0]
if '.' in str(call_name):
call_name = str(call_na... | 7c41025382f4ca25db1ccd328e9eb17e1d72a01a | 5,115 |
from typing import Any
def clean_setting(
name: str,
default_value: object,
min_value: int = None,
max_value: int = None,
required_type: type = None,
choices: list = None,
) -> Any:
"""cleans the user input for an app's setting in the Django settings file
Will use default_value if set... | 91066dd26987ad04fc9ae9b8447e35fa64f8365d | 5,116 |
def update_not_existing_kwargs(to_update, update_from):
"""
This function updates the keyword aguments from update_from in
to_update, only if the keys are not set in to_update.
This is used for updated kwargs from the default dicts.
"""
if to_update is None:
to_update = {}
to_update... | a66de151e6bc6d8f5b2f1b0ff32e30d2c8cb5277 | 5,117 |
def linear_forward(A, W, b):
    """Compute the linear step of forward propagation: Z = W @ A + b.

    Returns Z together with a cache tuple (A, W, b) kept for backprop.
    """
    cache = (A, W, b)
    Z = W @ A + b
    return Z, cache
import contextlib
import wave
def read_wave(path):
"""Reads a .wav file.
Takes the path, and returns (PCM audio data, sample rate).
"""
with contextlib.closing(wave.open(path, 'rb')) as wf:
num_channels = wf.getnchannels()
assert num_channels == 1
sample_width = wf.getsampwidth... | 5148e788cb5f4bfe63b3e6f2cac24fe704fd9596 | 5,120 |
def update_rho_hat(rho_hat_q, rho_hat_g, phi_hat, K, Q, Y_tp1, gamma_t, W):
"""
rho_hat is an intermediate quantity
rho_hat_{n, nu, theta}(x) = 1/n E[ sum_{t=1}^n s(X_{t-1}, X_t, Y_t | Y_{0:n}, X_n=x)]
where s() are the sufficient statistics
see Cappe (2.5)
In our case (discrete emissions ... | 55713f9456ad3e8a5a1bf2fadf58e0befddf717a | 5,121 |
def obtain_dihedral_angles(system_coords, bond_distance):
"""
system_coords: coords for 1 frame
"""
ref_selection = system_coords[0]
# Process bonds for reference frame (first)
bonds = []
sq_bond_distance = bond_distance**2
for i in range(len(ref_selection)-1):
for j in rang... | 7aba964f81c550e6d6204d28327d65020e7372b0 | 5,122 |
def piecewise_accel(duration,initial,final):
"""Defines a piecewise acceleration.
Args:
duration (float): Length of time for the acceleration to complete.
initial (float): Initial value.
final (float): Final value.
"""
a = (final-initial)
return lambda t: initial + a * (
... | 7f6acd7ba2610a2e56cc1f0758b3a39543bfe8c2 | 5,123 |
def get_displayed_views(id):
    """
    Get the views inside the window rect for the given view id string.

    :param id: view id string
    :return: views reported by the solo instance
    """
    solo = get_solo()
    return solo.get_displayed_views(id)
def get_build_version(xform):
"""
there are a bunch of unreliable places to look for a build version
this abstracts that out
"""
version = get_version_from_build_id(xform.domain, xform.build_id)
if version:
return version, BuildVersionSource.BUILD_ID
version = get_version_from_app... | 417debd5d3daf10c28222d42e6cc90869f5779ec | 5,125 |
import typing
def get_shortlist(routing_table: 'TreeRoutingTable', key: bytes,
shortlist: typing.Optional[typing.List['KademliaPeer']]) -> typing.List['KademliaPeer']:
"""
If not provided, initialize the shortlist of peers to probe to the (up to) k closest peers in the routing table
:pa... | 884e4444cca22eaf9495dad8ff28bfc601b4c778 | 5,126 |
from typing import Dict
from typing import Any
def get_merged_contextvars(bound_logger: BindableLogger) -> Dict[str, Any]:
"""
Return a copy of the current context-local context merged with the context
from *bound_logger*.
.. versionadded:: 21.2.0
"""
ctx = get_contextvars()
ctx.update(st... | 3ee59f57ee10c4f57c4085e660cc054830688416 | 5,127 |
import warnings
def policy_iteration(policy, env, value_function=None, threshold=0.00001, max_steps=1000, **kwargs):
"""
Policy iteration algorithm, which consists on iterative policy evaluation until convergence for the current policy
(estimate over many sweeps until you can't estimate no more). And then... | 090fc3a4e87986afc9dfd3565a2d234c7d2e8005 | 5,128 |
def create_blueprint(app):
"""Register blueprint routes on app."""
blueprint = Blueprint(
"invenio_records_marc21",
__name__,
template_folder="../templates",
url_prefix="/marc21",
)
blueprint = init_theme_views(blueprint, app)
blueprint = init_records_views(blueprint... | 8aa53185d3d41e4e5aabfa2efaa6a73b94dc02f5 | 5,130 |
import json
def mock_machine():
"""Fixture localapi Machine init with the data/response.json file."""
with requests_mock.Mocker() as mock_resp:
f = open(response_test_path,)
data = json.load(f)
machine_ipaddr = "0.0.0.0"
mock_addr = f"http://{machine_ipaddr}:3000/api/v1/hvac"
... | 726aecd3195d39f8a0c48d93a00299a5d61ac90a | 5,131 |
def get_files_links(service, v):
"""Print links of uploaded files.
:param: service (object): Goolge Drive service object.
:param: v (string): Version of Tor Browser to look for.
"""
windows_re = 'torbrowser-install-%s_\w\w(-\w\w)?\.exe(\.asc)?' % v
linux_re = 'tor-browser-linux\d\d-%s_(\w... | bda4af382bb629ce40721ccff64553cd2b98d558 | 5,132 |
def list_(context, field, mpd_query=None):
"""
*musicpd.org, music database section:*
``list {TYPE} [ARTIST]``
Lists all tags of the specified type. ``TYPE`` should be ``album``,
``artist``, ``date``, or ``genre``.
``ARTIST`` is an optional parameter when type is ``album``,
... | b95b6e4e5be01a1796d1708fc214821ce4f78491 | 5,133 |
def palindrome(d: int)-> str:
"""
Function is getting the digits of the number, left shifting it by multiplying
it with 10 at each iteration and adding it the previous result.
Input: Integer
Output: String (Sentence telling if the number is palindrome or not)
"""
remainder = 0
revnum = 0... | fe654ab92a905e265987856bcd2106c7b082b490 | 5,134 |
import json
def import_from_file(request):
"""
Import a part of a source site's page tree via an import of a JSON file
exported to a user's filesystem from the source site's Wagtail Admin
The source site's base url and the source page id of the point in the
tree to import defined what to import a... | 0dd6d4f2499a05c13002a0c410a8558b8f5b3b29 | 5,135 |
def _build_groupby_indices(df, table_name, join_columns):
"""
Pre-computes indexes based on the group-by columns.
Returns a dictionary of tuples to the list of indices.
"""
log.info("Grouping table '{}' by: {}.".format(table_name,
", ".join(join_colu... | 16ba9cd231aac2560a5735dc4727dd5c15b90fc2 | 5,138 |
from typing import List
def add_multiple_package(package_list: List[str]) -> str:
"""
Generate latex code to add multiple package to preamble
:param package_list: List of package to add in preamble
"""
usepackage_command_list = []
for package in package_list:
usepackage_command_list.a... | 90bdd0a521c094d92c35ef92e62d6b43f6b135b4 | 5,139 |
from metrics.models import Group
def emailAdmins(msgData):
"""
Emails all admins with given message. States which admin created/is sending the message to everyone.
Return: {bool}
"""
try:
if not msgData['msg']:
print('No message was provided to send.')
return False
admins = list(Group.objects.get(... | d44989205c2c60bc618cffcfc9a08ad141f35e4b | 5,140 |
def add(isamAppliance, name, chainItems=[], description=None, check_mode=False, force=False):
"""
Create an STS chain template
"""
if force is False:
ret_obj = search(isamAppliance, name)
if force is True or ret_obj['data'] == {}:
if check_mode is True:
return isamApplia... | 7050cfbb052164ed9c570c065b62d5d90609df2c | 5,141 |
def MinHamDistance(pattern, dna_list):
    """Calculate the minimum Hamming distance from a DNA list.

    Sums HammingDistanceDiffLen(pattern, seq) over every sequence.
    """
    total = 0
    for sequence in dna_list:
        total += HammingDistanceDiffLen(pattern, sequence)
    return total
from datetime import datetime
def _add_note(text: str, user: KarmaUser) -> str:
"""Adds a new note to the database for the given user."""
_, note_msg = _parse_note_cmd(text)
if not note_msg:
return f"Sorry {user.username}, could not find a note in your message."
if _note_exists(note_msg, user... | 4b81f45c9839a919b41b6f45a09beaf322821211 | 5,143 |
def process_line(this_line, do_stemming=False, remove_stopwords=False):
"""
Given a line from the CSV file, gets the stemmed tokens.
"""
speech = process_csv_line(this_line)
speech_tokens = process_raw_speech_text(speech.contents, perform_stemming=do_stemming,
... | 2730bc7e942a2031f96cc40e889d72cf728bd45a | 5,145 |
def metadef_tag_count(context, namespace_name):
"""Get metadef tag count in a namespace"""
namespace = metadef_namespace_get(context, namespace_name)
_check_namespace_visibility(context, namespace, namespace_name)
count = 0
for tag in DATA['metadef_tags']:
if tag['namespace_id'] == namespa... | bc863cdbdde5abe4d845f01f49eed1a357e008e4 | 5,146 |
from typing import Literal
def act2graph(graph: Graph, xml_root: Xml, registry: dict,
namespaces: dict, tag: str) -> Graph:
""" Transform activityName tag into RDF graph.
The function transforms the Activity MasterData into identifier. The output
is a RDF graph that represents a part of the... | 899522fa59aa8acf8c0f55377793fc70be6c112b | 5,147 |
from typing import AnyStr
def to_checksum_address(value: AnyStr) -> ChecksumAddress:
"""
Makes a checksum address given a supported format.
"""
norm_address = to_normalized_address(value)
address_hash = encode_hex(keccak(text=remove_0x_prefix(norm_address)))
checksum_address = add_0x_prefix(
... | 7223c1fa612a1445c5c7d66410b9f34e4c302a74 | 5,148 |
def is_volatile(type):
    """Return True if *type* represents a C++ volatile type, False otherwise."""
    unaliased = remove_alias(type)
    return isinstance(unaliased, cpptypes.volatile_t)
def load_adult(as_frame: bool = False):
"""Load and return the higly imbalanced binary classification [adult income datatest](http://www.cs.toronto.edu/~delve/data/adult/desc.html).
you may find detailed description [here](http://www.cs.toronto.edu/~delve/data/adult/adultDetail.html)
"""
with resources... | 432ef18a197dba1a0e64b7606ba2d350fc402f28 | 5,150 |
from typing import Optional
from typing import Union
from typing import Callable
import json
import types
from typing import NoReturn
def sam(
body: Optional[Union[bool,Callable]] = json.loads,
pathParams: Optional[Union[bool,Callable]] = False,
queryString: Optional[Union[bool,Callable]] = False,
hea... | 4ccdbfc843fd07197819fae730faec97dc2316f7 | 5,151 |
import logging
def get_logger(name=None):
"""return a logger
"""
global logger
if logger is not None: return logger
print('Creating logger========================================>')
logger = logging.getLogger(name)
logger.setLevel(logging.INFO)
sh = logging.StreamHandler()
sh.setLe... | 34e0aa41b3e8c878574e1ab57eff41238b291672 | 5,152 |
import re
def LF_degen_spine(report):
"""
Checking for degenerative spine
"""
reg_01 = re.compile('degen',re.IGNORECASE)
reg_02 = re.compile('spine',re.IGNORECASE)
for s in report.report_text.text.split("."):
if reg_01.search(s) and reg_02.search(s):
return ABNORMAL_VAL
... | d0211476a3f179c26648546c21176866bad7c61e | 5,153 |
def make_log_format(fields, sep=" - "):
"""
Build a custom log format, as accepted by the logging module, from a list of field names.
:param fields: list or tuple of str - names of fields to use in log messages
:param sep: str - separator to put between fields. Default is ' - '
:return: a log format... | 7e05f4bced180ef98025576e9fa1b2cf4f296b92 | 5,154 |
def tweets_for(type, args, per_user=None):
"""
Retrieve tweets for a user, list or search term. The optional
``per_user`` arg limits the number of tweets per user, for
example to allow a fair spread of tweets per user for a list.
"""
lookup = {}
lookup[type] = args[0].strip("\"'")
tweets... | ae393d887de9d87a13c3d46a30bcc08d78867827 | 5,155 |
def sum_var(A):
    """Summation over axis 1 (var), equivalent to np.sum(A, 1).

    Sparse matrices are reduced with their own .sum and flattened via .A1;
    1-D dense inputs fall back to a scalar sum.
    """
    if issparse(A):
        return A.sum(1).A1
    if A.ndim > 1:
        return np.sum(A, axis=1)
    return np.sum(A)
def success_schema():
    """Pytest fixture: a SchemaVersion object flagged as successful."""
    schema = SchemaVersion("1.0")
    schema.success = True
    return schema
def _interfaces(config):
""" list system interfaces based on shape """
shape = lib.metadata.get_instance()['shape']
print
if config.getboolean('DEFAULT', 'auto') is True:
interfaces = lib.interfaces.get_interfaces_by_shape(shape)
else:
interfaces = config['DEFAULT']['interfaces']... | 7ea4d493293d910532b514edf4ec7efee2253a34 | 5,158 |
def getColumninfo(columns):
    """
    See ElementFaceToThickness.
    """
    # Only the thickness map is returned; problematic columns are discarded.
    thickness_map, _problematic = ElementFaceToThickness(columns)
    return thickness_map
from typing import Any
from typing import Optional
def script(
command: str, inputs: Any = [], outputs: Any = NULL, tempdir=False, **task_options
) -> Any:
"""
Execute a shell script as a redun task with file staging.
"""
if outputs == NULL:
outputs = File("-")
command_parts = []
... | fb7b404d7d46680240863778b541afa83dec4528 | 5,160 |
import requests
def get_forms(console: Console, sess: requests.Session, form_id: str = "General_Record_2020v2.0"):
"""
Method to get every form for a given FormID
"""
raw_resp = get_url(url=f"https://forms.agterra.com/api/{form_id}/GetAll/0", sess=sess)
if raw_resp.status_code != 200:
con... | 129a8789a51db7a6e043fe6c8fbb30c1af984a74 | 5,161 |
def load_dataset(input_files,
input_vocab,
mode,
batch_size=32,
min_seq_len=5,
num_buckets=4):
"""Returns an iterator over the training data."""
def _make_dataset(text_files, vocab):
dataset = tf.data.TextLineDataset(te... | b71c6c8aa1bd2143c911fdd9e7e4ec1526656a39 | 5,162 |
def _get_results(**kwargs):
    """
    Build a command from the parameters, run it, and return the
    normalized (stdout, stderr, return-code) triple.
    """
    command = _generate_command(**kwargs)
    output, error, rc = testoob.run_cmd.run_command(command)
    return (
        tt._normalize_newlines(output),
        tt._normalize_newlines(error),
        rc,
    )
def infer_from_discretized_mix_logistic(params):
"""
Sample from discretized mixture of logistic distributions
Args:
params (Tensor): B x C x T, [C/3,C/3,C/3] = [logit probs, means, log scales]
Returns:
Tensor: sample in range of [-1, 1].
"""
log_scale_min = float(np.log(1e-14))
... | 993e5c64abd0b623057256b868c7e94570e28574 | 5,164 |
from pathlib import Path
def _load_reft_data(reft_file, index_name="btl_fire_num"):
"""
Loads reft_file to dataframe and reindexes to match bottle data dataframe
"""
reft_data = pd.read_csv(reft_file, usecols=["btl_fire_num", "T90", "REFTMP_FLAG_W"])
reft_data.set_index(index_name)
reft_data["... | c9ae2a9d5212f5d9234fc95fb4cc008688db07b4 | 5,166 |
def commit_veto(environ, status, headers):
"""Veto a commit.
This hook is called by repoze.tm in case we want to veto a commit
for some reason. Return True to force a rollback.
By default we veto if the response's status code is an error code.
Override this method, or monkey patch the instancemeth... | 9fc96fe8cdbedde20cb325e189b71d9df94cf176 | 5,167 |
def rate_limited_api(view_func):
"""
Checks users last post to rate limited endpoints
(adding comments or recipes) and rejects if within timeout period
for api requests (returns JSON response)
"""
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
exceeded, msg = reques... | 3db16cd742339015efbbb9016a32a91e902453a3 | 5,168 |
from .tvdfunctions import CalculateTVD
from .backend import fetchoptions as fo
from .backend.exceptions import TVDLimiterFunctionInputError
def SecondOrderTVD(Uo, Courant, diffX, LimiterFunc, Limiter, Eps=0.01):
"""Return the numerical solution of dependent variable in the model eq.
This function uses the
... | 3433d3af49d1972868af7e21f02249c82de1a549 | 5,169 |
def login_required(func):
    """Allow only authenticated users to reach the wrapped handler."""
    async def wrapper(self, *args, **kwargs):
        if self.request.user is None:
            add_message(self.request, "LogIn to continue.")
            # NOTE(review): execution still falls through to the handler after
            # redirect(); presumably redirect() raises or short-circuits — confirm.
            redirect(self.request, "sign_in")
        return await func(self, *args, **kwargs)
    return wrapper
def _hunnyb_search_func(name):
    """Search function required by ``codecs.register``.

    Returns the codec tuple for the hunnyb encoding name or any of its
    aliases; implicitly returns None for unknown names, as the codecs
    machinery expects.
    """
    if name == HUNNYB_ENC_NAME or name in HB_ALIASES:
        return (_encode, _decode, None, None)
    return None
def fingerprint_file(file):
    """Open *file*, read its contents, and return the MD5 hex digest."""
    with open(file, 'rb') as handle:
        contents = handle.read()
    return md5(contents).hexdigest()
def toOTLookup(self, font, ff):
"""Converts a fontFeatures.Routine object to binary.
Args:
font: A ``TTFont`` object.
ff: The parent ``FontFeatures`` object containing this routine.
Returns a list of ``fontTools.otlLib.builder`` Builder objects allowing this
routine to be converted to ... | ea08870cfec146135584bb8e85f2e861adfa3e05 | 5,173 |
def apply_to_all(func, results, datasets):
"""Apply the given function to all results
Args:
func: the function to apply
results: nested dictionary where the nested levels are: algorithm name, sensitive attribute
and split ID
datasets: nested dictionary where the nested ... | 6ea085b3541a84ac97f63389ba83c3a06d5e0b85 | 5,174 |
def any_value_except(mapping, excluded_keys):
    """Return a value from *mapping* whose key is not in *excluded_keys*.

    Raises StopIteration if every key of the mapping is excluded.
    """
    for key in mapping:
        if key not in excluded_keys:
            return mapping[key]
    raise StopIteration
import torch
def one_hot(y, num_dim=10):
"""
One Hot Encoding, similar to `torch.eye(num_dim).index_select(dim=0, index=y)`
:param y: N-dim tenser
:param num_dim: do one-hot labeling from `0` to `num_dim-1`
:return: shape = (batch_size, num_dim)
"""
one_hot_y = torch.zeros(y.size(0), num_d... | 694bfea18ecbb5c5737e0d38c0aa0f5f52a82a55 | 5,176 |
def IdentityMatrix():
"""Creates an identity rotation matrix.
Returns a rotation matrix that has no effect on orientation.
This matrix can be the starting point for other operations,
such as using a series of calls to #Pivot to
create a custom rotation matrix.
Returns
-------
RotationM... | f156a67000fb36360134d3c696dc9caefebf736a | 5,178 |
def compute_K_from_vanishing_points(vanishing_points):
"""Compute intrinsic matrix given vanishing points.
Args:
vanishing_points: A list of vanishing points.
Returns:
K: The intrinsic camera matrix (3x3 matrix).
"""
# vanishing points used
v1 = vanishing_points[0]
v2 = vani... | 972cba32caee46d9d9c7ed30a3f4ad23bfafe070 | 5,179 |
def _tpd2vec(seq, dtype=float):
"""
Convert a tpd file string to a vector, return a NumPy array.
EXAMPLES:
>>> _tpd2vec('1|13|4; 20; 25|28')
array([ 1., 5., 9., 13., 20., 25., 26., 27., 28.])
>>> _tpd2vec('5.5; 1.2@3; 3|7|2')
array([ 5.5, 1.2, 1.2, 1.2, 3. , ... | c561852d27025fc4f7db7f027fba0e18b2ca157c | 5,181 |
from typing import Dict
def get_notification_html(*, notification_type: str, options: Dict, sender: str) -> str:
"""
Returns the formatted html for the notification based on the notification_type
:return: A string representing the html markup to send in the notification
"""
validate_options(option... | 7996c8f472de89498b04ed6563b893381f680209 | 5,182 |
def step(parents: be.Population, fitness: be.Fitness) -> tuple:
"""
The step function defines how an algorithm generation will be conducted. This function must receive a population and
a fitness object and return another population. In this case we will define the parameters of the algorithm within
the ... | 700c5a9a28145b9454fc68356eab328a84418461 | 5,183 |
def asset_dividend_record(self, **kwargs):
"""Asset Dividend Record (USER_DATA)
Query asset dividend record.
GET /sapi/v1/asset/assetDividend
https://binance-docs.github.io/apidocs/spot/en/#asset-dividend-record-user_data
Keyword Args:
asset (str, optional)
startTime (int, optiona... | 80ecbf4f03bb4431829130f3da546b937cf53d13 | 5,184 |
def heuristic(node_1, node_2):
    """Manhattan-distance heuristic for 4-direction movement."""
    x1, y1 = node_1
    x2, y2 = node_2
    return abs(x1 - x2) + abs(y1 - y2)
def exercise_2(inputs): # DO NOT CHANGE THIS LINE
    """
    Output should be the name of the class.
    """
    # Return the class object itself (presumably defined elsewhere in the
    # exercise module), not an instance of it.
    output = Party
    return output # DO NOT CHANGE THIS LINE
def undistort(img, mtx, dist):
"""Undistort an image using camera matrix and distortion coefficients"""
h, w = img.shape[:2]
# return undistorted image with minimum unwanted pixels. It's okay to remove some pixesl at image corners.
newcameramtx, roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (w,h), 0, ... | e8d32a8662a998c90f856116b97e555f2bdfeee4 | 5,187 |
def get_order(order_id, sandbox=False):
    """Get a single order using the Sell Fulfillment API."""
    return single_api_call(
        'sell_fulfillment_get_order',
        order_id=order_id,
        field_groups='TAX_BREAKDOWN',
        sandbox=sandbox,
    )
def extend_dict(x, *y):
"""Similar to Object.assign() / _.extend() in Javascript, using
'dict.update()'
Args:
x (dict): the base dict to merge into with 'update()'
*y (dict, iter): any number of dictionary or iterable key/value
pairs to be sequentially merged into 'x'. Skipped i... | f10a5bc7d5ed3646e6a9f8f9535a16bd800c7fcd | 5,189 |
def ErrorCriteria(errors):
"""Monitor the number of unexpected errors logged in the cluster. If more than five
errors have occurred on the cluster during this time period, post an alert. Posts a
warning if between one and four errors have occurred.
"""
ERROR_ALERT_THRESHOLD = 5
alerts = []
warnings = []
... | 0b388ca55009bb5219bd30ead91ce67521c0e743 | 5,190 |
def bdnyc_skyplot():
"""
Create a sky plot of the database objects
"""
# Load the database
db = astrodb.Database('./database.db')
t = db.query('SELECT id, ra, dec, shortname FROM sources', fmt='table')
# Convert to Pandas data frame
data = t.to_pandas()
data.index = data['id']
... | 7ceba0d0b5cf151e5629fefa943fa1a48f62d430 | 5,191 |
def get_model_config(model_name, dataset, params):
"""Map model name to model network configuration."""
model_map = _get_model_map(dataset.name)
if model_name not in model_map:
raise ValueError('Invalid model name \'%s\' for dataset \'%s\'' %
(model_name, dataset.name))
else:
return... | 88ce2fbb3415b0d5fa2348b9f9ba5dd029e49a73 | 5,192 |
def post_search(request):
"""Allow text matching search. """
form = SearchForm()
query = None
results = []
if 'query' in request.GET: # check if result is submitted by looking for query
form = SearchForm(request.GET)
if form.is_valid():
query = form.cleaned_data['query']... | ff6f36f28a0dbaaba8957049eb2fc64ff76470dc | 5,193 |
import numba
def PrimacyCodingNumeric_receptor_activity_monte_carlo_numba_generator(conc_gen):
""" generates a function that calculates the receptor activity for a given
concentration generator """
func_code = receptor_activity_monte_carlo_numba_template.format(
CONCENTRATION_GENERATOR=conc_gen)
... | 8ca9758227fe6b7e57269e929a6a7dc4a7d6b549 | 5,194 |
def subtoken_counts(proposed, ground_truth):
"""
Compute the number of precise tokens, proposed tokens and ground truth tokens
from two strings representing tokens.
"""
gt_subtokens = set(compute_subtokens(ground_truth))
proposed_subtokens = set(compute_subtokens(proposed))
precise_subtokens... | 496abf452a09c521b71acfe2951232b5a4c7b40d | 5,195 |
import random
def welcome():
    """Define welcome reply.

    Assembles "<hello> <nick>, <greeting> ! <proposal> ?" from randomly
    chosen fragments of the module-level phrase lists.
    """
    hello_part = random.choice(_HELLO_)
    nick_part = random.choice(_NICK_NAME_)
    greeting_part = random.choice(_WELCOME_)
    proposal_part = random.choice(_PROPOSAL_)
    return f"{hello_part} {nick_part}, {greeting_part} ! {proposal_part} ?"
def EDCN(linear_feature_columns,
dnn_feature_columns,
bridge_type='attention_pooling',
tau=0.1,
use_dense_features=True,
cross_num=2,
cross_parameterization='vector',
l2_reg_linear=1e-5,
l2_reg_embedding=1e-5,
l2_reg_cross=1e-5,
... | 61e3f6868613111001420d88b8c9b99f91361653 | 5,197 |
def cart_to_polar(arr_c):
"""Return cartesian vectors in their polar representation.
Parameters
----------
arr_c: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
arr_p: array, shape of arr_c
Polar vectors, using (radiu... | c4c2256fcc9b01849dc4012ceac017273dcc4ddb | 5,198 |
def createList(listSize):
"""
Creates list block that creates input instances for each element and an output instance for connecting to
the resulting list. List size is limited to 300 elements. Larger lists will be truncated.
:param listSize: The size of the list of point inputs that will be created
... | 91b508674ad6f26e9e7dd43cb372fb0804db7ccd | 5,199 |
def trash_description(spl, garbage, keyword, description="description_1"):
"""description_1 OR description_2"""
relocate = spl[spl[description].str.contains(keyword, na=False, regex=True)]
spl = spl[~spl[description].str.contains(keyword, na=False, regex=True)]
garbage = pd.concat([garbage, relocate], i... | 16a1512ddaf914bd5ebcd00f2dcdfa11d59ec73c | 5,201 |
import random
def prepositionalPhrase():
    """Builds and returns a prepositional phrase."""
    preposition = random.choice(prepositions)
    return "{} {}".format(preposition, nounPhrase())
def check_possible_dtype(df):
"""Guess dtypes for each column in a dataframe, where dataframe must contains only string values.
Raise an exception if dataframe contains non-string values.
:param df: a DataFrame whose all values must be strings.
"""
column = []
int_cnt = []
dec_cnt = []
... | 0e9759959af04fbf1bb9db3672f6a188afe7f6ab | 5,203 |
from typing import List
def filter_objects_avoiding_duplicated(objects: List[Object],
max_distance: int = 20) -> List[Object]:
"""Filtra los objetos evitando aquellas posibles que sean detecciones múltiples.
El fundamento del algoritmo es que si se detectan dos objetos ... | 042fee5df94dc1c72fb53635577c8006c57f73f9 | 5,204 |
def print_hdr(soup, hdr, file = None):
"""
:param soup: [bs4.BeautifulSoup] document context
:param hdr: [dict] header node to process
:param file: [stream] I/O stream to print to
:return: [stream] pass on the I/O stream so descent continues
"""
tag = hdr['tag']
tag_id = tag['id']
in... | 2c6fd613a5c6ddb5ec842fb7cee845d1a8771ccd | 5,207 |
from unittest.mock import Mock
def __empty_2():
""" Empty used as parent of cube_2 """
obj = Mock()
obj.name = 'empty_2'
obj.mode = 'OBJECT'
obj.to_mesh.return_value = None
obj.matrix_world = Matrix.Identity(4)
obj.visible_get.return_value = False
obj.hide_viewport = True
obj.hide_... | 024614d7967da5da6d6629167a20eda4188e812f | 5,208 |
def get_gradient(bf_data: np.ndarray, smooth=10):
"""
Removes first dimension,
Computes gradient of the image,
applies gaussian filter
Returns SegmentedImage object
"""
data = strip_dimensions(bf_data)
gradient = get_2d_gradient(data)
smoothed_gradient = gaussian_filter(gradient, smo... | 864b3bc118d08099c56657b2f2883e20de5c663e | 5,210 |
def sum_seq(seq):
    """Lambda wrapper for sum.

    Reduces *seq* along axis 1 without keeping the reduced dimension.
    """
    return K.sum(seq, axis=1, keepdims=False)
def split_by_time(files_rad):
    """Separate a list of files by their timestamp.

    :param files_rad: either a list of files, or a dict mapping keys to
        lists of files; each list is split independently.
    :return: the result of ``_split_by_time`` (or a dict of such results,
        keyed like the input, when a dict is given).
    """
    # isinstance instead of ``type(...) == dict`` also accepts dict subclasses
    # (e.g. OrderedDict) — backward compatible for plain dicts.
    if isinstance(files_rad, dict):
        return {key: _split_by_time(value) for key, value in files_rad.items()}
    return _split_by_time(files_rad)
def make_general_csv_rows(general_csv_dict):
"""
Method for make list of metrics from general metrics dict.
Rows using in general metrics writer
:param general_csv_dict: dict with all metrics
:type general_csv_dict: dict
:return: all metrics as rows
:rtype: list
"""
rows = []
f... | 45ca165d312b39cd0b7088e0bcbfb402a92e7e2b | 5,213 |
def build_hstwcs(crval1, crval2, crpix1, crpix2, naxis1, naxis2, pscale, orientat):
""" Create an HSTWCS object for a default instrument without distortion
based on user provided parameter values.
"""
wcsout = wcsutil.HSTWCS()
wcsout.wcs.crval = np.array([crval1,crval2])
wcsout.wcs.crpix = n... | 0247a8dc7e6aa083db50f21d82676216583be206 | 5,214 |
def build_regressor_for_ranking_positive_class(dataset, features, regression_target=TARGET_COLUMN):
"""This function builds a regressor based exclusively on positive class'
examples present in the dataset
"""
if regression_target in features:
print('The target for the regression task cannot be one of the f... | 1312751425f79c1e4fec09f705f0ea551e2a60b3 | 5,215 |
def get_speakable_timestamp(timestamp):
"""Return a 'speakable' timestamp, e.g. 8am, noon, 9pm, etc."""
speakable = f"{timestamp.strftime('%I').lstrip('0')} {timestamp.strftime('%p')}"
if speakable == '12 PM':
return 'noon'
elif speakable == '12 AM':
return 'midnight'
return speakab... | 0b724686ebd5d3152d9017dc456d2945c78be0ee | 5,216 |
def createColor(red: int, green: int, blue: int) -> tuple:
    """
    Create color
    Parameters:
        red -> 0-255
        green -> 0-255
        blue -> 0-255
    Returns an (r, g, b) tuple with each channel clamped to [0, 255].
    """
    # BUG FIX: tuple() accepts a single iterable argument; the original
    # tuple(a, b, c) call raised TypeError. Build the tuple literally.
    return (
        max(min(red, 255), 0),
        max(min(green, 255), 0),
        max(min(blue, 255), 0),
    )
from typing import List
def export_nodeclass_list(node_classes: List[NodeClass]) -> str:
"""Writes the Node data as a XML string. Does not write
to a file -- use ``with open(output_file) as out_stream:`` etc.
"""
# This is the data string, the rest is formalities
node_classes_string = '\n'.join([s... | f50638e9b3a7ab2f1df6e49703b9ed3e39916f9d | 5,218 |
import time
def recognition(request):
"""
style transform service
"""
if request.method == 'POST':
name = ''
predicitons = ''
try:
# load image
now = time.localtime()
img = request.FILES['image']
image_name = '{}{}{}{}{}o... | d8de5ab5c33e6ca0c2ac5afbec81c402f7151187 | 5,219 |
def url(s):
    """Validate url input.

    Accepts only http/https URLs; raises ValueError otherwise and returns
    the re-assembled URL string on success.
    """
    parsed = urlparse(s)
    if parsed.scheme not in ["http", "https"]:
        raise ValueError(s)
    return parsed.geturl()
import pickle
def getGPLCs(df, savepath='./',plotpath='./', bands='ugrizY', ts='0000000', fn='GPSet'):
"""Short summary.
Parameters
----------
df : type
Description of parameter `df`.
savepath : type
Description of parameter `savepath`.
plotpath : type
Description of p... | 755dec48771ae17c058565ef88087d6ec6a78aec | 5,221 |
import torch
def _featurize(inputs,model):
"""
Helper function used to featurize exemplars before feeding into
buffer.
"""
with torch.no_grad():
# Forward pass
outputs = model(*inputs).detach() #Featurize raw exem
return outputs | 191fd1b362f38309a35618284fcf3f1910a06bd6 | 5,222 |
def ligth_condition(img, args):
"""
Change ligthning condition in the image
Inputs:
img: Image to change ligthning
args: Dictionary with "gamma" argument
Return:
Image with ligthning values changed
"""
invGamma = 1.0 / args["gamma"]
table = np.array([((i / 255.0) ** i... | dc5273a1df8e13292147b00be45452a7ccf4a197 | 5,223 |
import numpy as np
from sklearn.metrics import mean_squared_error
def calc_RMSE(varx,vary,lats,lons,weight):
"""
Calculates root mean square weighted average
Parameters
----------
varx : 2d array
vary : 2d array
lons : 1d array of latitude
weigh... | 150d08e0790f3a8ce59a2054cdc042ff6cdc2969 | 5,224 |
def sample(internal_nodes, alpha=0.5, beta=0.5, only_tree=False):
""" Generates a junction tree with order internal nodes with the junction tree expander.
Args:
internal_nodes (int): number of nodes in the underlying graph
alpha (float): parameter for the subtree kernel
beta (float): pa... | d0cc00e7ad96491147149aa4be396af970a9f68f | 5,225 |
def _get_version_tuple():
    """
    Version as a (major, minor, revision) tuple.
    """
    # major/minor/revision are module-level globals defined elsewhere.
    return (major, minor, revision)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.