content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def prepositionalPhrase():
    """Builds and returns a prepositional phrase.

    Picks a random preposition from the module-level ``prepositions``
    list and joins it with a freshly built noun phrase.
    """
    parts = [random.choice(prepositions), nounPhrase()]
    return " ".join(parts)
def output_yaml_key(dictionary, yaml_key, hierarchy_regex):
"""
Fancy print function that prints out a key in the global dictionary with the hierarchy ordered as defined in
hiera.yaml
"""
print("%s:" % yaml_key)
if len(dictionary[yaml_key]) > 1:
# convert all 2nd level dict into orderedd... | 7,701 |
def check_possible_dtype(df):
"""Guess dtypes for each column in a dataframe, where dataframe must contains only string values.
Raise an exception if dataframe contains non-string values.
:param df: a DataFrame whose all values must be strings.
"""
column = []
int_cnt = []
dec_cnt = []
... | 7,702 |
def display_image(img, heatmap, vectmap):
"""
Displays an image and associated heatmaps and pafs (all)
:param img:
:param heatmap:
:param vectmap:
:return:
"""
fig = plt.figure()
a = fig.add_subplot(2, 2, 1)
a.set_title('Image')
plt.imshow(_get_bgimg(img))
a = fig.add_s... | 7,703 |
def filter_objects_avoiding_duplicated(objects: List[Object],
max_distance: int = 20) -> List[Object]:
"""Filtra los objetos evitando aquellas posibles que sean detecciones múltiples.
El fundamento del algoritmo es que si se detectan dos objetos con un centroide muy cerca... | 7,704 |
def splitall(path):
"""
Credit goes to Trent Mick
SOURCE:
https://www.oreilly.com/library/view/python-cookbook/0596001673/ch04s16.html
"""
allparts = []
while 1:
parts = os.path.split(path)
if parts[0] == path: # sentinel for absolute paths
allparts.ins... | 7,705 |
def netstat():
"""
Return list of all connections.
Return list of TCP listenning connections and UDP connections.
All localhost connections are filtered out.
This script must run as root in order to be able to obtain PID values
of all processes. For more information see:
https://unix.stack... | 7,706 |
def print_hdr(soup, hdr, file = None):
"""
:param soup: [bs4.BeautifulSoup] document context
:param hdr: [dict] header node to process
:param file: [stream] I/O stream to print to
:return: [stream] pass on the I/O stream so descent continues
"""
tag = hdr['tag']
tag_id = tag['id']
in... | 7,707 |
def usage():
    """
    Prints usage information, naming the running script from sys.argv[0].
    """
    print(f"usage: {sys.argv[0]} <title> <gof_fileroot> <indir> <outdir> <cutoff>")
def test_peekleft_child_empty_deque():
    """Test peekleft_child on an empty deque."""
    empty = Deque()
    # Peeking an empty deque must yield None rather than raising.
    assert empty.peekleft_child() is None
def __empty_2():
    """Mock Blender object for 'empty_2', the parent of cube_2."""
    mock_obj = Mock()
    mock_obj.name = 'empty_2'
    mock_obj.mode = 'OBJECT'
    mock_obj.matrix_world = Matrix.Identity(4)
    # An empty has no mesh data to convert.
    mock_obj.to_mesh.return_value = None
    # Hidden everywhere: viewport, render, and visibility query.
    mock_obj.visible_get.return_value = False
    mock_obj.hide_viewport = True
    mock_obj.hide_render = True
    return mock_obj
def parse_args(args):
"""Parse command line parameters
Args:
args ([str]): command line parameters as list of strings
Returns:
:obj:`argparse.Namespace`: command line parameters namespace
"""
parser = argparse.ArgumentParser(
description="A scaffolding program for developer not... | 7,711 |
def get_gradient(bf_data: np.ndarray, smooth=10):
"""
Removes first dimension,
Computes gradient of the image,
applies gaussian filter
Returns SegmentedImage object
"""
data = strip_dimensions(bf_data)
gradient = get_2d_gradient(data)
smoothed_gradient = gaussian_filter(gradient, smo... | 7,712 |
def sum_seq(seq):
    """Lambda wrapper for sum.

    Sums ``seq`` over axis 1 (the sequence/time axis) and drops the
    reduced dimension.  NOTE(review): ``K`` is presumably the Keras
    backend module -- confirm against this file's imports.
    """
    return K.sum(seq, axis=1, keepdims=False)
def p_opt_visible_order_spec(t):
    """opt_visible_order_spec : LPAREN visible_order_spec RPAREN"""
    # NOTE: the docstring above is functional -- PLY (yacc) parses it as
    # this rule's grammar production. Do not edit it casually.
    # Reduce to the inner visible_order_spec, discarding the parentheses.
    t[0] = t[2]
async def test_sensors(hass, setup_entry):
"""Test the SleepIQ binary sensors for a bed with two sides."""
entity_registry = er.async_get(hass)
state = hass.states.get("sensor.sleepnumber_ile_test1_sleepnumber")
assert state.state == "40"
assert state.attributes.get(ATTR_ICON) == "mdi:bed"
asse... | 7,715 |
def split_by_time(files_rad):
    """Separate a list of files by their timestamp.

    :param files_rad: a list of files, or a dict mapping keys to lists
        of files.
    :return: the result of ``_split_by_time`` on the list, or a dict of
        per-key ``_split_by_time`` results when a dict was given.
    """
    # isinstance (rather than "type(x) == dict") also accepts dict
    # subclasses such as OrderedDict.
    if isinstance(files_rad, dict):
        return {key: _split_by_time(files) for key, files in files_rad.items()}
    return _split_by_time(files_rad)
def make_general_csv_rows(general_csv_dict):
"""
Method for make list of metrics from general metrics dict.
Rows using in general metrics writer
:param general_csv_dict: dict with all metrics
:type general_csv_dict: dict
:return: all metrics as rows
:rtype: list
"""
rows = []
f... | 7,717 |
def build_hstwcs(crval1, crval2, crpix1, crpix2, naxis1, naxis2, pscale, orientat):
""" Create an HSTWCS object for a default instrument without distortion
based on user provided parameter values.
"""
wcsout = wcsutil.HSTWCS()
wcsout.wcs.crval = np.array([crval1,crval2])
wcsout.wcs.crpix = n... | 7,718 |
def build_regressor_for_ranking_positive_class(dataset, features, regression_target=TARGET_COLUMN):
"""This function builds a regressor based exclusively on positive class'
examples present in the dataset
"""
if regression_target in features:
print('The target for the regression task cannot be one of the f... | 7,719 |
def get_speakable_timestamp(timestamp):
"""Return a 'speakable' timestamp, e.g. 8am, noon, 9pm, etc."""
speakable = f"{timestamp.strftime('%I').lstrip('0')} {timestamp.strftime('%p')}"
if speakable == '12 PM':
return 'noon'
elif speakable == '12 AM':
return 'midnight'
return speakab... | 7,720 |
def compute_save_stat(outdir, trn_dict, dec_dict, wavlen_dict, declen_dict, fwlen_dict):
"""
Save computed statistics e.g. WER, decoding length, wave length
Args:
outdir(str): path to directory for the generated log files are saved.
trn_dict(dict): (Wave name, transcription) dictionary
... | 7,721 |
def createColor(red: int, green: int, blue: int) -> tuple:
    """
    Create an RGB color tuple, clamping each channel into [0, 255].

    Parameters:
        red -> 0-255
        green -> 0-255
        blue -> 0-255

    Returns:
        (red, green, blue) with each channel clamped into [0, 255].
    """
    # BUG FIX: the original called tuple(a, b, c), which raises
    # TypeError (tuple() takes a single iterable). Build the tuple
    # literal directly instead.
    return (
        max(min(red, 255), 0),
        max(min(green, 255), 0),
        max(min(blue, 255), 0),
    )
def export_nodeclass_list(node_classes: List[NodeClass]) -> str:
"""Writes the Node data as a XML string. Does not write
to a file -- use ``with open(output_file) as out_stream:`` etc.
"""
# This is the data string, the rest is formalities
node_classes_string = '\n'.join([str(c) for c in node_classe... | 7,723 |
def recognition(request):
"""
style transform service
"""
if request.method == 'POST':
name = ''
predicitons = ''
try:
# load image
now = time.localtime()
img = request.FILES['image']
image_name = '{}{}{}{}{}object.jpg'.for... | 7,724 |
def url(s):
    """Validate url input

    Accepts only http/https URLs; raises ValueError otherwise and
    returns the normalized URL string on success.
    """
    parsed = urlparse(s)
    if parsed.scheme in ("http", "https"):
        return parsed.geturl()
    raise ValueError(s)
def getGPLCs(df, savepath='./',plotpath='./', bands='ugrizY', ts='0000000', fn='GPSet'):
"""Short summary.
Parameters
----------
df : type
Description of parameter `df`.
savepath : type
Description of parameter `savepath`.
plotpath : type
Description of parameter `plotpa... | 7,726 |
async def db_async_test_data(db_async_session):
    """A fixture to fill the DB with test data.

    Use this in asynchronous tests.

    :param db_async_session: async database session (presumably an
        SQLAlchemy ``AsyncSession`` fixture -- confirm in conftest).
    """
    # begin() opens a transaction that commits when the block exits.
    async with db_async_session.begin():
        for obj in _gen_test_data_objs():
            db_async_session.add(obj)
def prepare_embeddings():
"""
Prepares fastText embeddings (available at https://fasttext.cc/docs/en/english-vectors.html) for use in the model.
Function expects unarchived fastText embedding file.
"""
file_in = io.open(cnf.embedding_file, 'r', encoding="utf-8", newline='\n', errors='ignore')
n... | 7,728 |
def _featurize(inputs,model):
"""
Helper function used to featurize exemplars before feeding into
buffer.
"""
with torch.no_grad():
# Forward pass
outputs = model(*inputs).detach() #Featurize raw exem
return outputs | 7,729 |
def ligth_condition(img, args):
"""
Change ligthning condition in the image
Inputs:
img: Image to change ligthning
args: Dictionary with "gamma" argument
Return:
Image with ligthning values changed
"""
invGamma = 1.0 / args["gamma"]
table = np.array([((i / 255.0) ** i... | 7,730 |
def calc_RMSE(varx,vary,lats,lons,weight):
"""
Calculates root mean square weighted average
Parameters
----------
varx : 2d array
vary : 2d array
lons : 1d array of latitude
weight : string (yes or no)
Returns
-------
... | 7,731 |
def sample(internal_nodes, alpha=0.5, beta=0.5, only_tree=False):
""" Generates a junction tree with order internal nodes with the junction tree expander.
Args:
internal_nodes (int): number of nodes in the underlying graph
alpha (float): parameter for the subtree kernel
beta (float): pa... | 7,732 |
def _get_version_tuple():
    """
    version as a tuple

    Returns the module-level ``major``, ``minor`` and ``revision``
    values as a ``(major, minor, revision)`` tuple.
    """
    return major, minor, revision
def compare_activity_to_sector_flowamounts(fba_load, fbs_load,
activity_set, config):
"""
Function to compare the loaded flowbyactivity with the final flowbysector
by activityname (if exists) to target sector level
output, checking for data loss
:param fba_... | 7,734 |
def _fit_seasonal_model_with_gibbs_sampling(observed_time_series,
seasonal_structure,
num_warmup_steps=50,
num_results=100,
seed=None):
"""Bui... | 7,735 |
def deskew(data, angle, dx, dz, rotate=True, return_resolution=True, out=None):
"""
Args:
data (ndarray): 3-D array to apply deskew
angle (float): angle between the objective and coverslip, in degree
dx (float): X resolution
dz (float): Z resolution
rotate (bool, optional... | 7,736 |
def process_data(path,stage = 'train'):
"""
train
test
sample_submission
"""
# loading the data
df = pd.read_csv(os.path.join(path,f'{stage}.csv'))
MASK = -1 # fill NA with -1
T_HIST = 10 # time history, last 10 games
# for cols "date", change to datatime
for col in df.filter(regex='date', axis=1).columns:... | 7,737 |
def rm_standard_dev(var,window):
"""
Smoothed standard deviation
"""
import pandas as pd
import numpy as np
print('\n\n-----------STARTED: Rolling std!\n\n')
rollingstd = np.empty((var.shape))
for ens in range(var.shape[0]):
for i in range(var.shape[2]):
for... | 7,738 |
def enable_scope(daq: ziDAQServer, device_id: str, *, single: int) -> None:
"""Enables the scope.
Args:
daq: Instance of a Zurich Instruments API session connected to a Data
Server. The device with identifier device_id is assumed to already
be connected to this instance.
... | 7,739 |
def test_outdated_local(tmpdir, local, remote):
"""Test with remote changes not pulled.
:param tmpdir: pytest fixture.
:param local: conftest fixture.
:param remote: conftest fixture.
"""
# Setup separate local repo now before pushing changes to it from the primary local repo.
local_outdate... | 7,740 |
def load_letter(folder, min_num_images):
"""Load the data for a single letter label."""
image_files = os.listdir(folder)
dataset = np.ndarray(shape=(len(image_files), image_size, image_size),
dtype=np.float32)
image_index = 0
print(folder)
for image in os.listdir(folder):
image_... | 7,741 |
def visualizeTimeSeriesCategorization(dataName, saveDir, numberOfLagsToDraw=3, autocorrelationBased=True):
"""Visualize time series classification.
Parameters:
dataName: str
Data name, e.g. "myData_1"
saveDir: str
Path of directories pointing to data stora... | 7,742 |
def refund(payment_information: Dict, connection_params) -> Dict:
"""Refund a payment using the culqi client.
But it first check if the given payment instance is supported
by the gateway.
It first retrieve a `charge` transaction to retrieve the
payment id to refund. And return an error with a fail... | 7,743 |
def get_args():
"""Get command-line arguments"""
parser = argparse.ArgumentParser(
description='Howler',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('text',
metavar='str',
help='Input text or file')
parser... | 7,744 |
def main ():
"""The function that is called in a command line context. """
# Make sure we aren't unintentionally overwriting an input file
nonOverlapping([featListFile,inputFile,featDefsFile],[outputFile,templateFile])
# Initialize whatever script variables have to be initialized
initia... | 7,745 |
def _ssim_map(
X: torch.Tensor,
Y: torch.Tensor,
data_range: float,
win: torch.Tensor,
K: Tuple[float, float] = (0.01, 0.03),
scales: Tuple[float, float, float] = (1, 1, 1),
gradient_based: bool = False,
) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Given two tensors it calculates ... | 7,746 |
def check_random_state(seed):
"""Turn `seed` into a `np.random.RandomState` instance.
Parameters
----------
seed : {None, int, `numpy.random.Generator`,
`numpy.random.RandomState`}, optional
If `seed` is None (or `np.random`), the `numpy.random.RandomState`
singleton is used.... | 7,747 |
def power3_sum_2method():
"""
Input:
nothing, it have everything it needs.
Output:
sum: summ of all numbers which is power of 3
and fit in between 0 and upper bound == 1000000
"""
k = 0
sum = 0
while True:
a = 3**k
k += 1
if a < 1000000:
s... | 7,748 |
def bitwise_right_shift(rasters, extent_type="FirstOf", cellsize_type="FirstOf", astype=None):
"""
The BitwiseRightShift operation
The arguments for this function are as follows:
:param rasters: array of rasters. If a scalar is needed for the operation, the scalar can be a double or string
:param ... | 7,749 |
def strip_comments(line):
"""Strips comments from a line and return None if the line is empty
or else the contents of line with leading and trailing spaces removed
and all other whitespace collapsed"""
commentIndex = line.find('//')
if commentIndex is -1:
commentIndex = len(line)
line ... | 7,750 |
def fft_pxscale(header,wave):
"""Compute conversion scale from telescope space to sky space.
Parameters
----------
ima : array
2D Telescope pupil model.
Returns
-------
fftscale : float
The frequency scale in sky space.
Example
------... | 7,751 |
def parse_git_submodules(gitmodules_data):
"""Parse a .gitmodules file to extract a { name -> url } map from it."""
result = {}
# NOTE: configparser.ConfigParser() doesn't seem to like the file
# (i.e. read_string() always returns None), so do the parsing
# manually here.
section_nam... | 7,752 |
def get_model_cases(dir_path: pathlib.Path) -> Dict[str, Dict[str, str]]:
"""
Returns the Zen model case for each test if it exists.
:param dir_path: The path to the directory containing the DIFFERENCES directory.
"""
model_cases = defaultdict(dict) # type: Dict[str, Dict[str, str]]
queries_dir... | 7,753 |
def _format_date(event):
"""Returns formated date json object for event"""
old_date = event["date"]
term = event["term"]
dates = old_date.split("-")
if len(dates) == 1:
is_range = False
else:
is_range = True
is_range = (len(dates) > 1)
if is_range:
start_date =... | 7,754 |
def fetch(bibcode, filename=None, replace=None):
"""
Attempt to fetch a PDF file from ADS. If successful, then
add it into the database. If the fetch succeeds but the bibcode is
not in th database, download file to current folder.
Parameters
----------
bibcode: String
ADS bibcode ... | 7,755 |
def crop_multi(x, wrg, hrg, is_random=False, row_index=0, col_index=1):
"""Randomly or centrally crop multiple images.
Parameters
----------
x : list of numpy.array
List of images with dimension of [n_images, row, col, channel] (default).
others : args
See ``tl.prepro.crop``.
R... | 7,756 |
def read_payload_hiding_places(data, orig_filename, vm, vba_code, vba):
"""
Read in text values from all of the various places in Office
97/2000+ that text values can be hidden. This reads values from
things like ActiveX captions, embedded image alternate text,
document variables, form variables, et... | 7,757 |
def value_loss_given_predictions(value_prediction,
rewards,
reward_mask,
gamma,
epsilon,
value_prediction_old=None):
"""Computes the value loss given the... | 7,758 |
def build_pkt(pkt):
"""Build and return a packet and eth type from a dict."""
def serialize(layers):
"""Concatenate packet layers and serialize."""
result = packet.Packet()
for layer in reversed(layers):
result.add_protocol(layer)
result.serialize()
return re... | 7,759 |
def make_file_prefix(run, component_name):
    """
    Compose the run number and component name into string prefix
    to use with filenames.

    Note the ordering: component name first, then run number.
    """
    return f"{component_name}_{run}"
def identifier_needs_escaping(text):
"""
Slightly slow, but absolutely correct determination if a given symbol _must_ be escaped.
Necessary when you might be generating column names that could be a reserved keyword.
>>> identifier_needs_escaping("my_column")
False
>>> identifier_needs_escaping(... | 7,761 |
def initialize_lock_and_key_ciphers() -> Dict[str, VigenereCipher]:
"""[summary]
Returns:
Dict[VigenereCipher]: [description]"""
ciphers = {}
with open(CIPHER_RESOURCE, "r") as cipher_resource_file:
cipher_data = load(cipher_resource_file, Loader=FullLoader)
for cipher_key_name, ci... | 7,762 |
def download(redownload=False):
"""Download webpages of retsinformation.dk.
Parameters
----------
redownload : bool, optional
Controls whether the webpages should be redownloaded.
Notes
-----
This function uses the `wget` program, so it will need to be installed.
Download may t... | 7,763 |
def add_service():
"""
Used to register a new service
"""
form = ServiceForm()
if form.validate_on_submit():
try:
srv = Services()
srv.populate_from_form(form)
srv.authentication.value = {"db":request.form.get('authdb'),"user":request.form.get('authuser'... | 7,764 |
def f_columnas_pips(datos):
"""
Parameters
----------
datos : pandas.DataFrame : df con información de transacciones ejecutadas en Oanda,
después de haber ejecutado f_columnas_tiempos
Returns
-------
datos : pandas.DataFrame : df modificado
Debugging
... | 7,765 |
def parse_resolution(resolution):
    """
    Parse a comma-separated "width,height,resolution" string.

    return: width, height, resolution (all as ints)
    """
    fields = resolution.strip().split(',')
    width = int(fields[0])
    height = int(fields[1])
    res = int(fields[2])
    return width, height, res
def link_cube(cube, locale, provider=None, namespace=None,
ignore_missing=False):
"""Links dimensions to the `cube` in the `context` object. The `context`
object should implement a function `dimension(name, locale, namespace,
provider)`. Modifies cube in place, returns the cube.
"""
# ... | 7,767 |
def _parser() -> argparse.Namespace:
"""Take care of all the argparse stuff.
:returns: the args
"""
# parser = GooeyParser(description='Remove : from data files')
parser = argparse.ArgumentParser(description='Combines Nods using ')
parser.add_argument('listspectra', help='List of spectra to com... | 7,768 |
def get_arguments(func):
"""Returns list of arguments this function has."""
if hasattr(func, '__code__'):
# Regular function.
return inspect.getargspec(func).args
elif hasattr(func, '__call__'):
# Callable object.
print(func)
return _get_arguments(func.__call__)
e... | 7,769 |
def _check(isamAppliance, name):
"""
Check if suffix exists
"""
ret_obj = get(isamAppliance)
check_value, warnings = False, ret_obj['warnings']
if warnings == []:
for suffix in ret_obj['data']:
if suffix['name'] == name:
logger.info("Suffix found in embedded ... | 7,770 |
def check_listening_address(address: str) -> bool:
    """Check entered ip address for validity."""
    # 'localhost' is always accepted; any other value must be one of the
    # machine's local addresses. Short-circuit keeps the original
    # behavior of not querying addresses for 'localhost'.
    return address == 'localhost' or address in get_local_addresses()
def multibase_b64decode(data):
"""
Follow forge's base64 urlsafe encode convention to decode string
Args:
data(string): encoded string
Returns: bytes
Examples:
>>> multibase_b64decode('aGVsbG8')
b'hello'
"""
if isinstance(data, str):
data = data.encode()
... | 7,772 |
async def test_turn_off(hass):
"""Test that turn off service calls function."""
mock_entity_id = await setup_mock_component(hass)
mock_func = "{}{}".format(
"homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.standby"
)
with patch(mock_func) as mock_call:
await hass.servic... | 7,773 |
def parse_ordering_params(param: List[str]) -> List[str]:
"""
Ignores the request to sort by "ord".
Returns a sorting order based on the params and includes "readable_id"
sorting in passed params if the sorting request contains title
otherwise, it returns the requested order.
"""
if "ord" in... | 7,774 |
def find_requests(from_dt=None, to_dt=None, contribs_and_sessions=True):
"""Finds requests matching certain criteria.
:param from_dt: earliest event/contribution to include
:param to_dt: latest event/contribution to include
:param contribs_and_sessions: whether it should return contributions and sessio... | 7,775 |
def file_senzing_rabbitmq():
"""#!/usr/bin/env bash
# --- Functions ---------------------------------------------------------------
function up {
echo -ne "\033[2K${CONTAINER_NAME} status: starting...\r"
mkdir -p ${RABBITMQ_DIR}
chmod 777 ${RABBITMQ_DIR}
if [ "${CONTAINER_VERSION}" == "latest" ]... | 7,776 |
def mse(predictions, targets):
    """Calculate MSE (mean squared error) between predictions and targets."""
    errors = predictions - targets
    return (errors ** 2).mean()
def export1d(hist):
"""Export a 1-dimensional `Hist` object to uproot
This allows one to write a coffea histogram into a ROOT file, via uproot.
Parameters
----------
hist : Hist
A 1-dimensional histogram object
Returns
-------
out
A ``uproot_methods.cla... | 7,778 |
def filter_words(w_map, emb_array, ck_filenames):
""" delete word in w_map but not in the current corpus """
vocab = set()
for filename in ck_filenames:
for line in open(filename, 'r'):
if not (line.isspace() or (len(line) > 10 and line[0:10] == '-DOCSTART-')):
line = lin... | 7,779 |
def take(data, indices, dim):
    """Takes elements from an input array along the given dim.

    NOTE(review): this is an unimplemented stub -- it always returns
    ``None``. Presumably a backend registers or overrides the real
    implementation elsewhere; confirm before calling directly.

    Parameters
    ----------
    data : Tensor
        The data tensor.
    indices : Tensor
        The indices tensor.
    dim : Tensor
        The dimension to gather along.
    """
    pass
def get_cache_template(sources, grids, geopackage, table_name="tiles"):
"""
Returns the cache template which is "controlled" settings for the application.
The intent is to allow the user to configure certain things but impose specific behavior.
:param sources: A name for the source
:param grids: sp... | 7,781 |
def plotter(fdict):
""" Go """
pgconn = get_dbconn('coop')
ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
ctx = get_autoplot_context(fdict, get_description())
station = ctx['station']
lagmonths = ctx['lag']
months = ctx['months']
month = ctx['month']
highyears = [... | 7,782 |
def output_folder_fixture():
    """Create the output folder for a test and clean it up afterwards.

    Yields:
        str: path of the output folder (``OUTPUT_FOLDER``).
    """
    # makedirs with exist_ok avoids the check-then-create race of the
    # previous exists()/mkdir() pair and also creates missing parents.
    os.makedirs(OUTPUT_FOLDER, exist_ok=True)
    yield OUTPUT_FOLDER
    shutil.rmtree(OUTPUT_FOLDER, ignore_errors=True)
def _action_spec():
"""Returns the action spec."""
paddle_action_spec = dm_env_rpc_pb2.TensorSpec(
dtype=dm_env_rpc_pb2.INT8, name=_ACTION_PADDLE)
tensor_spec_utils.set_bounds(
paddle_action_spec,
minimum=np.min(_VALID_ACTIONS),
maximum=np.max(_VALID_ACTIONS))
return {1: paddle_action_sp... | 7,784 |
def test_split_stats_manual() -> None:
"""
Test that `get_split_statistics()` correctly computes the z-score over the pairwise
differences in task gradients for manually computed values.
"""
# Set up case.
settings = dict(V1_SETTINGS)
settings["num_layers"] = 1
settings["num_tasks"] = 4... | 7,785 |
def test_cli_compile(patch, runner, echo, app):
"""
Ensures the compile command compiles a story.
"""
patch.object(click, 'style')
runner.invoke(Cli.compile, [])
App.compile.assert_called_with(os.getcwd(), ebnf=None,
ignored_path=None, concise=False,
... | 7,786 |
def test_kms_key_policy():
"""
To test that key policy is applied is passed in
"""
template = Template()
key_admins = "arn:aws:iam::111122223333:user/admin1"
key_users = ["arn:aws:iam::111122223333:user/user1", "arn:aws:iam::444455556666:user/user2"]
kms_key = (KmsKey(key_title='MyTestKey'... | 7,787 |
async def uptime(ctx: commands.Context):
    """Tells how long the bot has been online."""
    elapsed_seconds = int(time.time() - botstart)
    uptime_delta = datetime.timedelta(seconds=elapsed_seconds)
    await ctx.send(f"**Uptime:** {uptime_delta}")
def stdin(sys_stdin):
    """
    Imports standard input.

    The first line is a bracketed comma-separated list of ints; the
    second line is the target integer.

    :param sys_stdin: iterable of input lines (e.g. ``sys.stdin``)
    :return: tuple ``(a, x)`` -- the parsed list and the target value
    """
    inputs = [line.strip("[]\n") for line in sys_stdin]
    a = [int(value) for value in inputs[0].split(",")]
    # BUG FIX: parse the whole second line, not just its first
    # character, so multi-digit targets (e.g. "12") are read correctly.
    # Backward compatible for single-digit input.
    x = int(inputs[1])
    return a, x
def _row_key(row):
"""
:param row: a normalized row from STATEMENT_METRICS_QUERY
:return: a tuple uniquely identifying this row
"""
return row['database_name'], row['user_name'], row['query_signature'], row['query_hash'], row['query_plan_hash'] | 7,790 |
def is_packet_length(outter_key, inner_key) -> None:
"""Prints packet length"""
if outter_key == "packet":
if inner_key.get('length').get('min') is not None:
make_list = is_instance(inner_key.get('length').get('min'))
print(f"{'Pkt Length(min):':>15} {', '.join(make_list)}... | 7,791 |
def get (url, user_agent=UA, referrer=None):
"""Make a GET request of the url using pycurl and return the data
(which is None if unsuccessful)"""
data = None
databuffer = StringIO()
curl = pycurl.Curl()
curl.setopt(pycurl.URL, url)
curl.setopt(pycurl.FOLLOWLOCATION, 1)
curl.setopt(pycu... | 7,792 |
def detect_callec(tree):
"""Collect names of escape continuations from call_ec invocations in tree.
Currently supported and unsupported cases::
# use as decorator, supported
@call_ec
def result(ec): # <-- we grab name "ec" from here
...
# use directly on a literal... | 7,793 |
def test_distributed_evaluation_multiprocessing(do_mwcp=True):
"""
Full test run using the Distributed Evaluator (fake nodes using processes).
Note that this is not a very good test for the
DistributedEvaluator, because we still work on
one machine, not across multiple machines.
We emulate the o... | 7,794 |
def apply_filters(
stream: StreamMeta, filters: List[Tuple[str, str]], config: Any
) -> StreamMeta:
"""Apply enabled filters ordered by priority on item"""
filter_pool = get_filter_pool(filters, config)
for filter_instance in filter(
lambda x: x.enabled, sorted(filter_pool, key=lambda x: x.prio... | 7,795 |
def threading_d(func):
"""
A decorator to run function in background on thread
Args:
func:``function``
Function with args
Return:
background_thread: ``Thread``
"""
@wraps(func)
def wrapper(*args, **kwags):
background_thread = Thread(target=func, args=(*args,))
ba... | 7,796 |
def create_anchors_3d_stride(
feature_size,
sizes=[1.6, 3.9, 1.56],
anchor_strides=[0.4, 0.4, 0.0],
anchor_offsets=[0.2, -39.8, -1.78],
rotations=[0, np.pi / 2],
velocities=[],
dtype=np.float32,
):
"""
Args:
feature_size: list [D, H, W](zyx)
sizes: [N, 3] list of list... | 7,797 |
def test_mnemonic_inventory():
    """Test the retrieval of the mnemonic inventory."""
    result = mnemonic_inventory()
    # The first element is the full mnemonic list; expect a large inventory.
    assert len(result[0]) > 1000
def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib

    digest = hashlib.sha1()
    with open(filename, "rb") as f:
        # Read fixed-size chunks: iterating a binary file by line could
        # load the entire file at once if it contains no newlines.
        for chunk in iter(lambda: f.read(1 << 16), b""):
            digest.update(chunk)
    return digest.hexdigest()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.