content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def build_parser(args):
""" A method to handle argparse.
"""
parser = argparse.ArgumentParser(usage='$ python verdict.py',
description='''Downloads, filters and
re-publishes the Google
... | 12,800 |
def parse_query(qd, session, config):
"""Parses the given query dictionary to produce a BaseQuery object."""
from mlalchemy.parser import parse_query as mlalchemy_parse_query
defaults = {
"limit": config["default_limit"],
"backref_limit": config["default_backref_limit"],
"backref_de... | 12,801 |
def get_covid19_us_bears(
url_root=CSV_URL_ROOT,
file_prefix=CSV_FILE_PREFIX,
file_suffix=CSV_FILE_SUFFIX,
encoding=CSV_ENCODING) -> Dict[Dict[Bears]]:
"""Converts USAFACTS confirmed and deaths CSV files to state and county
`Bears` to a dictionary of dictionaries.
Args:
url_root (str): URL pr... | 12,802 |
def jsonify_promise(
future_obj: Input[Jsonable],
indent: Input[Optional[Union[int, str]]]=None,
separators: Input[Optional[Tuple[str, str]]]=None
) -> Output[str]:
"""Convert a Promise object to a Promise to jsonify the result of that Promise.
An asyncronous (Promise) version of json.dumps()... | 12,803 |
def run_adriz(flc_files):
""" Runs AstroDrizzle in order to create cosmic ray masks and to obtain
an estimate of the global sky background in new keyword 'mdrizsky'.
"""
# Assumes first 6 letters of a visit's images are all the same.
common = flc_files[0][:6]
search = '{}*flc.fits'.format(commo... | 12,804 |
def randnums(start, stop, n_samples):
    """Draw ``n_samples`` random integers in [start, stop] as a numpy array.

    Helper used to select real samples and generate fake samples.

    Args:
        start (int): inclusive lower bound.
        stop (int): inclusive upper bound (``random.randint`` semantics).
        n_samples (int): number of values to draw.

    Returns:
        np.ndarray: 1-D array of ``n_samples`` random integers.
    """
    # Comprehension instead of the manual append loop; same RNG call order.
    return np.array([randint(start, stop) for _ in range(n_samples)])
def shift_1_spectra(spectra, shift):
""" This method find the relative position of the FFT of the two spectras \
in order to later k-linearize.
Args:
:param spectra1: OCT spectra of first mirror.
:type spectra1: list
Return:
:rname: Zspace: - pi to pi linear vector space
... | 12,806 |
def hpat_pandas_series_div(self, other, level=None, fill_value=None, axis=0):
"""
Pandas Series method :meth:`pandas.Series.div` and :meth:`pandas.Series.truediv` implementation.
.. only:: developer
Test: python -m sdc.runtests sdc.tests.test_series.TestSeries.test_series_op5
Parameters
---... | 12,807 |
def alias(self, arg):
"""
set the new alias to magic
*alias alias1 string*
alias1 is added into magic command
"""
if arg == '' or arg.lower() == 'help':
return dbhelp(self, 'alias')
name, fstring = arg.split(" ", 1)
print "new alias: %s <%s>" % (DBPRINT.msg_green(name), fstring)
... | 12,808 |
def _transform_cat_options(metadata: dict) -> pd.DataFrame:
"""Transform category options metadata into a formatted DataFrame."""
df = pd.DataFrame.from_dict(metadata.get("categoryOptions"))
df = df[["id", "code", "shortName", "name"]]
df.columns = ["co_uid", "co_code", "co_shortname", "co_name"]
re... | 12,809 |
def idaview(request, idadb, idadf):
"""
IdaDataFrame fixture to be used for the whole testing session. Open a view
based on idadf fixture.
"""
def fin():
try:
idadb.drop_view("TEST_VIEW_ibmdbpy")
idadb.commit()
except:
pass
request.addfinalizer... | 12,810 |
def fields() -> None:
    """IoT Fields.

    Body is intentionally empty; presumably a CLI command-group callback
    (e.g. click/typer) — TODO confirm against the surrounding module.
    """
def get_openmp_flag(compiler):
"""Returns list of flags for using OpenMP depending on compiler and
platform.
Parameters
----------
compiler : numpy.distutils.compiler
Compiler used when invoking setup.py build
"""
if hasattr(compiler, 'compiler'):
compiler = compiler.compil... | 12,812 |
def heuristical_lengths(items):
"""
heuristical_lengths tries to deriver the lengths of the content of items.
It always returns a list.
a) If typeof(items) is a string, it'll return [len(items)]
b) If typeof(items) is a dict, it'll return [len(items)]
c) If typeof(items) is either list or tuple,... | 12,813 |
def tifpages(file_id, filename, db_cursor):
"""
Check if TIF has multiple pages
"""
p = subprocess.Popen(['identify', '-format', '%n\\n', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
try:
if int(len(out.split())) == 1:
pages_... | 12,814 |
def next(space, w_arr):
    """ Advance the internal array pointer of an array """
    advanced = w_arr.current_idx + 1
    if advanced < w_arr.arraylen():
        # Still inside the array: move the pointer and return the new element.
        w_arr.current_idx = advanced
        return w_arr._current(space)
    # Ran off the end: pin the pointer at the length and report failure.
    w_arr.current_idx = w_arr.arraylen()
    return space.w_False
def geocoordinatess_id_get(id, username=None): # noqa: E501
"""Get a single GeoCoordinates by its id
Gets the details of a given GeoCoordinates (more information in https://w3id.org/okn/o/sdm#GeoCoordinates) # noqa: E501
:param id: The ID of the GeoCoordinates to be retrieved
:type id: str
:param... | 12,816 |
def pprint(object, stream=None, indent=1, width=80, depth=None, *,
           compact=False):
    """Pretty-print a Python object to a stream [default is sys.stdout]."""
    # Build and use the printer in one expression; identical configuration.
    PrettyPrinter(stream=stream, indent=indent, width=width, depth=depth,
                  compact=compact).pprint(object)
def check_auth(request):
"""Check authentication on request.
:param request: Flask request
:raises: utils.Error if access is denied
"""
if not conf.getboolean('discoverd', 'authenticate'):
return
if request.headers.get('X-Identity-Status').lower() == 'invalid':
raise Error('Auth... | 12,818 |
def timer_cb(watcher, revents):
    """Timed callback, right out of the book: bump the counter and log state."""
    watcher.data += 1
    loop = watcher.loop
    print("timer.data: {0}".format(watcher.data))
    print("timer.loop.iteration: {0}".format(loop.iteration))
    print("timer.loop.now(): {0}".format(loop.now()))
def check_callable(target, label=None):
    """Checks target is callable and then returns it."""
    if callable(target):
        return target
    # Same message as before: "<label> to be" when labelled, bare "a" otherwise.
    described = '{} to be'.format(label) if label is not None else 'a'
    raise TypeError('Expected {} callable, found non-callable {}.'.format(
        described, type_string(type(target))))
def convert_dictionary_values(d, map={}):
"""convert string values in a dictionary to numeric types.
Arguments
d : dict
The dictionary to convert
map : dict
If map contains 'default', a default conversion is enforced.
For example, to force int for every column but column ``id``,
... | 12,821 |
def GetLimitPB(user, action_type):
"""Return the apporiate action limit PB part of the given User PB."""
if action_type == PROJECT_CREATION:
if not user.project_creation_limit:
user.project_creation_limit = user_pb2.ActionLimit()
return user.project_creation_limit
elif action_type == ISSUE_COMMENT:
... | 12,822 |
def test_compress_bam_dry_run(bam_tmp_file, base_context):
"""Test to run the compress bam command"""
# GIVEN the path to a existing bam file and a cli runner
runner = CliRunner()
bam_path = bam_tmp_file
assert bam_path.exists()
# WHEN running the compress command with dry_run
result = runne... | 12,823 |
def best_int_dtype(data):
    """Get the signed int dtype whose bit depth best represents the float data.

    Args:
        data (np.ndarray): numeric array; its peak-to-peak range sizes the dtype.

    Returns:
        np.dtype: one of 'i1'/'i2'/'i4'/'i8' chosen from the data range.
    """
    # np.ptp(data) instead of data.ptp(): the ndarray method was removed in
    # NumPy 2.0; the free function works on all versions.
    d, r = divmod(np.log2(np.ptp(data)), 8)
    d = max(d, 1)
    # Round the byte count up to the next power of two (1, 2, 4, 8).
    i = (2 ** (int(np.log2(d)) + bool(r)))
    return np.dtype('i%d' % i)
def gen_decorate_name(*args):
"""
gen_decorate_name(name, mangle, cc, type) -> bool
Generic function for 'decorate_name()' (may be used in IDP modules)
@param name (C++: const char *)
@param mangle (C++: bool)
@param cc (C++: cm_t)
@param type (C++: const tinfo_t *)
"""
return _ida_typeinf.g... | 12,825 |
def main(self, count=10):
"""
kosmos -p 'j.servers.myjobs.test("start")'
"""
self.reset()
def wait_1sec():
gevent.sleep(1)
return "OK"
ids = []
for x in range(count):
job_sch = self.schedule(wait_1sec)
ids.append(job_sch.id)
self._workers_gipc_nr_max =... | 12,826 |
async def uptime(ctx):
"""Displays how long the bot has been online for"""
second = time.time() - start_time
minute, second = divmod(second, 60)
hour, minute = divmod(minute, 60)
day, hour = divmod(hour, 24)
week, day = divmod(day, 7)
await ctx.send(
"I've been online for %d ... | 12,827 |
def get_asc() -> pd.DataFrame:
"""Get Yahoo Finance small cap stocks with earnings growth rates better than 25%. [Source: Yahoo Finance]
Returns
-------
pd.DataFrame
Most aggressive small cap stocks
"""
url = "https://finance.yahoo.com/screener/predefined/aggressive_small_caps"
dat... | 12,828 |
def getEnabled(chat_id):
    """Gets the status of a conversation"""
    # Missing record means the conversation was never enabled.
    status = EnableStatus.get_by_id(str(chat_id))
    return status.enabled if status else False
def create_app(config=DevelopConfig):
"""App factory."""
app = Flask(
__name__.split('.')[0],
static_url_path='/static',
static_folder=f'{config.PROJECT_PATH}/src/static'
)
app.url_map.strict_slashes = False
app.config.from_object(config)
register_extensions(app)
regi... | 12,830 |
def write_charging_cost_results(
record, calculated_annual_charging_cost,
calculated_annual_charging_kwh, ev_specific_rate,
csv_writer
):
"""
Write the charging cost results for a record.
:param record:
:param calculated_annual_charging_cost:
:param calculated_annual_charging... | 12,831 |
def merge_inputs_for_create(task_create_func):
"""Merge all inputs for start operation into one dict"""
# Needed to wrap the wrapper because I was seeing issues with
# "RuntimeError: No context set in current execution thread"
def wrapper(**kwargs):
# NOTE: ctx.node.properties is an ImmutablePr... | 12,832 |
def embedding_lookup(params, ids):
"""Wrapper around ``tf.nn.embedding_lookup``.
This converts gradients of the embedding variable to tensors which allows
to use of optimizers that don't support sparse gradients (e.g. Adafactor).
Args:
params: The embedding tensor.
ids: The ids to lookup in :obj:`para... | 12,833 |
def value_as_unit(value: T | None, unit: Unit = None) -> T | Quantity[T] | None:
    """Return value as specified unit or sensor fault if value is none."""
    # None propagates (sensor fault); no unit means the raw value passes through.
    if value is None or unit is None:
        return value
    return value * unit
def get_static_spatial_noise_image(image) :
""" The first step is to sum all of the odd-numbered images (sumODD image)
and separately sum all of the even-numbered images (sumEVEN image). The
difference between the sum of the odd images and the sum of the even
images (DIFF = sumODD - sumEVEN)... | 12,835 |
def get_gallery_dir() -> str:
    """Return the path to the mephisto task gallery."""
    root = get_root_dir()
    return os.path.join(root, "gallery")
def get_next_action():
""" gets the next action to perform, based on get_action_odds """
action_odds = get_action_odds()
#print(f"DEBUG action_odds {action_odds}")
# get the sum of all the action odds values
total = 0
for action in action_odds:
#print(f"DEBUG get_next_action total ... | 12,837 |
def get_movie_title(movie_id):
    """
    Takes in an ID, returns a title
    """
    # IDs are 1-based; the items frame is indexed from 0.
    row = int(movie_id) - 1
    return items.iloc[row]['TITLE']
def draw_material(material, face=GL_FRONT_AND_BACK):
"""Draw a single material"""
if material.gl_floats is None:
material.gl_floats = (GLfloat * len(material.vertices))(*material.vertices)
material.triangle_count = len(material.vertices) / material.vertex_size
vertex_format = VERTEX_FORMATS... | 12,839 |
def get_logger():
"""
Return a logger object
"""
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(message)s")
handler.setFormatter(formatter)
logge... | 12,840 |
def test_employment():
"""Test module employment.py by downloading
employment.csv and testing shape of
extracted data has 24 rows and 4 columns
"""
test_path = tempfile.mkdtemp()
x_train, metadata = employment(test_path)
try:
assert x_train.shape == (24, 4)
except:
shutil.rmtree(test_path)
... | 12,841 |
def wav(args, scope_data, infile):
"""Create an audible .wav file for use in LTSpice."""
wav_name = os.path.splitext(infile)[0] + '.wav'
if os.path.isfile(wav_name) and not args.force:
print("'%s' exists, use --force to overwrite" % wav_name)
return
scope_data.wav(wav_name, channel=args... | 12,842 |
def vim_image_api_delete_image(connection, msg):
"""
Handle Delete-Image API request
"""
global _image_delete_operations
DLOG.verbose("Delete image, uuid=%s." % msg.uuid)
_image_delete_operations[msg.uuid] = connection
image_director = directors.get_image_director()
image_director.image... | 12,843 |
def enough_gap_since_last_obs(df, current_state, obs_log):
"""
Determine if a sufficient time has passed since the last observation
in this subprogram (in any filter):
"""
now = current_state['current_time'].mjd
# don't mess up with the upstream data structure
df = df.copy()
grp = df.... | 12,844 |
def find_nearest_feature_to_attribute(sentence, features, attribute):
"""
Parameters
----------
sentence: str,
One sentence from the info text of a mushroom species
features: list of strs
List of possible features as in dataset_categories.features_list
attribute: str,
Mushro... | 12,845 |
def test_if_elif_else():
"""
>>> d = {'v':1}
>>> txt = '''
... {{if v==1:}}1{{elif v==2:}}2{{else:}}other{{pass}}
... '''
>>> print (template(txt, d))
<BLANKLINE>
1
<BLANKLINE>
>>> print (template(txt, {'v':2}))
<BLANKLINE>
2
<BLANKLINE>
>>> print (template(txt, {... | 12,846 |
def lists():
"""
库存列表
:return:
"""
template_name = 'inventory/lists.html'
# 文档信息
document_info = DOCUMENT_INFO.copy()
document_info['TITLE'] = _('inventory lists')
# 搜索条件
form = InventorySearchForm(request.form)
form.warehouse_id.choices = get_warehouse_choices()
form.ra... | 12,847 |
def open_1d_txt(filename, xaxcol=0, datacol=1, errorcol=2,
text_reader='simple', format=None, **kwargs):
"""
Attempt to read a 1D spectrum from a text file assuming wavelength as the
first column, data as the second, and (optionally) error as the third.
Reading can be done either with a... | 12,848 |
def create_tables(hpo_id, drop_existing=False):
"""
Create the achilles related tables
:param hpo_id: associated hpo id
:param drop_existing: if True, drop existing tables
:return:
"""
for table_name in ACHILLES_HEEL_TABLES:
table_id = bq_utils.get_table_id(hpo_id, table_name)
... | 12,849 |
def flux_to_sql(con, solute_db, site_key,leg,site,hole,solute,flux,
burial_flux,gradient,porosity,z,dp,bottom_conc,conc_fit,
r_squared,age_depth_boundaries,sedrate,advection,precision,ds,
temp_d,bottom_temp,bottom_temp_est,cycles,por_error,mean_flux,
m... | 12,850 |
def clean_record(raw_string: str) -> str:
"""
Removes all unnecessary signs from a raw_string and returns it
:param raw_string: folder or file name to manage
:return: clean value
"""
for sign in ("'", '(', ')', '"'):
raw_string = raw_string.replace(sign, '')
return raw_string.replace... | 12,851 |
def df_of_tables_for_dd_ids(dd_ids, sqlite_tables, sql_con):
"""
:param list dd_ids: list of Deep Dive IDs to retrieve
:param list sqlite_tables: list of SQLite tables to join
:param sqlalchemy.create_engine sql_con: Connection to SQLite (can be \
omitted)
:returns: `pandas.DataFrame` -- datafra... | 12,852 |
def get_group_type(group: Union[hou.EdgeGroup, hou.PointGroup, hou.PrimGroup]) -> int:
"""Get an HDK compatible group type value.
:param group: The group to get the group type for.
:return: An HDK group type value.
"""
try:
return _GROUP_TYPE_MAP[type(group)]
except KeyError as exc:
... | 12,853 |
def download_data(count, n_dims, n_classes, outpath, name):
"""Download data set to the data folder for further usage"""
dataset, ground_truth = make_classification(
n_samples=count,
n_features=n_dims,
n_informative=2,
n_redundant=2,
n_repeated=0,
n_classes=n_clas... | 12,854 |
def test_positive() -> None:
    """
    Run mypy on the positive test file. There should be no errors.
    """
    cmd = get_mypy_cmd(POSITIVE_FILE)
    subprocess.check_call(cmd)
def mp2d_driver(jobrec, verbose=1):
    """Drive the jobrec@i (input) -> mp2drec@i -> mp2drec@io -> jobrec@io (returned) process."""
    return module_driver(jobrec=jobrec,
                         module_label='mp2d',
                         plant=mp2d_plant,
                         harvest=mp2d_harvest,
                         verbose=verbose)
def query():
"""
TODO: add secret file and pass them to functions
"""
# client = get_client_as_service_account('/path/to/service_secret.json')
# client = get_client_as_user_account('/path/to/client_secret.json', False)
client = get_client_with_ADC()
query_job = client.query(QUERY_STRING)
... | 12,857 |
def query_user_list():
    """
    Retrieve list of users on user watch list.

    Returns:
        list: all rows from the ``watched_users`` table.
    """
    conn = connect.connect()
    try:
        cur = conn.cursor()
        cur.execute("SELECT * FROM watched_users")
        return cur.fetchall()
    finally:
        # The original never released the connection; always close it.
        conn.close()
def service_stop_list(service_id, direction):
""" Queries all patterns for a service and creates list of stops sorted
topologically.
:param service_id: Service ID.
:param direction: Groups journey patterns by direction - False for
outbound and True for inbound.
"""
graph, di... | 12,859 |
def test_infer_a_json_converter(json_sample_path):
    """Infer a JSONConverter from file path string."""
    converter = infer_converter_from_file_type(json_sample_path)
    assert isinstance(converter, JSONConverter)
def convert_coord(value):
    """Convert a GPS reading to degrees/minutes/seconds form.

    Args:
        value (str): longitude or latitude read from GPS, e.g. '12345.6789'.

    Returns:
        list: [degrees, minutes, seconds] as strings.
    """
    whole, frac = value.split('.')
    # Fractional minutes -> seconds; Decimal keeps the string exact.
    seconds = Decimal('0.' + frac) * 60
    return [whole[:-2], whole[-2:], seconds.to_eng_string()]
def _interpolate_zbuf(
pix_to_face: torch.Tensor, barycentric_coords: torch.Tensor, meshes
) -> torch.Tensor:
"""
A helper function to calculate the z buffer for each pixel in the
rasterized output.
Args:
pix_to_face: LongTensor of shape (N, H, W, K) specifying the indices
of th... | 12,862 |
def pp_chain(chain: Sequence[Subtree]) -> str:
    """Pretty-print a chain as space-separated node labels/strings."""
    parts = []
    for node in chain:
        # Trees contribute their label; anything else its str() form.
        parts.append(node.label if isinstance(node, ParentedTree) else str(node))
    return ' '.join(parts)
def ptr_ty(ty : 'LLVMType') -> 'LLVMPointerType':
    """``ty*``, i.e. a pointer to a value of type ``ty``."""
    pointer = LLVMPointerType(ty)
    return pointer
async def test_set_fan_speed(hass: HomeAssistant, device: Dyson360Eye):
"""Test setting fan speed of the vacuum."""
fan_speed_map = {
"Max": PowerMode.MAX,
"Quiet": PowerMode.QUIET,
}
for service_speed, command_speed in fan_speed_map.items():
await hass.services.async_call(
... | 12,865 |
def series_spline(self):
"""Fill NaNs using a spline interpolation."""
inds, values = np.arange(len(self)), self.values
invalid = isnull(values)
valid = -invalid
firstIndex = valid.argmax()
valid = valid[firstIndex:]
invalid = invalid[firstIndex:]
inds = inds[firstIndex:]
result ... | 12,866 |
def label(job_name, p5_connection=None):
"""
Syntax: Job <name> label
Description: Returns the (human readable) job label.
The following labels are returned:
Archive, Backup, Synchronize and System.
A Job label can be used in conjunction with the Job describe command to
better display the jo... | 12,867 |
def rating(pairing, previous):
    """The lower the rating value is the better"""
    # Flatten every pair's member list into one membership set.
    current = {member for pair in pairing for member in pair[1]}
    overlaps = current & set(previous)
    if not overlaps:
        return 0.0
    # Recent overlaps weigh more: 0.97 ** (age in days).
    return sum(math.pow(0.97, previous[o] / 86400) for o in overlaps)
def copy_multipart_passthrough(src_blob: AnyBlob,
dst_blob: CloudBlob,
compute_checksums: bool=False) -> Optional[Dict[str, str]]:
"""
Copy from `src_blob` to `dst_blob`, passing data through the executing instance.
Optionally compute checksums.
... | 12,869 |
def read_manifest_from_csv(filename):
"""
Read the ballot manifest into a list in the format ['batch id : number of ballots']
from CSV file named filename
"""
manifest = []
with open(filename, newline='') as csvfile:
reader = csv.reader(csvfile, delimiter = ",")
for row in reader... | 12,870 |
def open_github(subdir=None):
"""Opens the GitHub repository for this package.
Args:
subdir (str, optional): Sub-directory of the repository. Defaults to None.
"""
import webbrowser
url = 'https://github.com/giswqs/geemap'
if subdir == 'source':
url += '/tree/master/geemap/'
... | 12,871 |
def siblings_list():
"""
Shows child element iteration
"""
o = untangle.parse(
"""
<root>
<child name="child1"/>
<child name="child2"/>
<child name="child3"/>
</root>
"""
)
return ",".join([child["name"] for child in o.root.chil... | 12,872 |
def process_linked_datasets(labbook: LabBook, logged_in_username: str) -> None:
"""Method to update or init any linked dataset submodule references, clean up lingering files, and schedule
jobs to auto-import if needed
Args:
labbook: the labbook to analyze
logged_in_username: the current log... | 12,873 |
def max_distance_from_home(traj, start_night='22:00', end_night='07:00', show_progress=True):
"""
Compute the maximum distance from home (in kilometers) traveled by an individual.
:param traj: the trajectories of the individuals
:type traj: TrajDataFrame
:param str start_night: the starting ti... | 12,874 |
def combine_histogram(old_hist, arr):
""" Collect layer histogram for arr and combine it with old histogram.
"""
new_max = np.max(arr)
new_min = np.min(arr)
new_th = max(abs(new_min), abs(new_max))
(old_hist, old_hist_edges, old_min, old_max, old_th) = old_hist
if new_th <= old_th:
h... | 12,875 |
def main():
"""Make a jazz noise here"""
args = get_args()
annotations = args.annotations
outfile = args.outfile
input = args.csv
if not os.path.isfile(input):
die('"{}" is not a file'.format(input))
if not os.path.isfile(annotations):
die('"{}" is not a file'.format(annota... | 12,876 |
def test_update_secondary_ids(client):
"""Function to test the update_secondary_ids function"""
customer = Customer(client, number='+254711892648')
data = [
{"key": "passport", "value": "808083", "expires_at": 300000000},
{
"key": "huduma",
"value": "808082",
... | 12,877 |
async def repo_is_here(wannasee):
    """ For .repo command, just returns the repo URL. """
    # Edits the triggering message in place with a markdown repo link
    # (message text is user-facing; kept exactly as-is).
    await wannasee.edit("[Repo](https://github.com/tesbot07/ironbot) Lihat ke GitHub.")
def unmunchify(x):
""" Recursively converts a Munch into a dictionary.
>>> b = Munch(foo=Munch(lol=True), hello=42, ponies='are pretty!')
>>> sorted(unmunchify(b).items())
[('foo', {'lol': True}), ('hello', 42), ('ponies', 'are pretty!')]
unmunchify will handle intermediary dicts, ... | 12,879 |
def split_data(mapping, encoded_sequence):
""" Function to split the prepared data in train and test
Args:
mapping (dict): dictionary mapping of all unique input charcters to integers
encoded_sequence (list): number encoded charachter sequences
Returns:
numpy array : train and test... | 12,880 |
def plot_spectral_hist(freqs, power_bins, spectral_hist, spectrum_freqs=None,
spectrum=None, ax=None, **kwargs):
"""Plot spectral histogram.
Parameters
----------
freqs : 1d array
Frequencies over which the histogram is calculated.
power_bins : 1d array
Power ... | 12,881 |
def plot_data(data, savefile):
"""Creates a phase diagram plot from the data by using a "for" loop to plot each
column within the dataframe with a series of predefined colors for each column
and saves the figure as a jpg."""
phase_diagram_fig = plt.figure()
plot1 = phase_diagram_fig.add_subplot()
... | 12,882 |
def fsp_loss(teacher_var1_name,
teacher_var2_name,
student_var1_name,
student_var2_name,
program=None):
"""Combine variables from student model and teacher model by fsp-loss.
Args:
teacher_var1_name(str): The name of teacher_var1.
teacher_var2... | 12,883 |
def diff_numpy_array(A, B):
    """
    Numpy Array A - B: return items in A that are not in B.

    By Divakar, https://stackoverflow.com/a/52417967/1497443

    Args:
        A (np.ndarray): array to filter.
        B (np.ndarray): values to exclude.

    Returns:
        np.ndarray: elements of A absent from B, in A's original order.
    """
    # np.isin supersedes np.in1d (deprecated; removed in NumPy 2.x).
    return A[~np.isin(A, B)]
def load_training_data(training_fns, trunc_min_scores,
trunc_max_scores, debug=False):
""" First parse group, read and position to find shared data points
Then read in training scores, truncating as appropriate """
# Parse file twice. First time get all the loci, second time all the value data
... | 12,885 |
def _find_data_between_ranges(data, ranges, top_k):
"""Finds the rows of the data that fall between each range.
Args:
data (pd.Series): The predicted probability values for the postive class.
ranges (list): The threshold ranges defining the bins. Should include 0 and 1 as the first and last val... | 12,886 |
def validar_entero_n():
    """Prompt for an integer ``n`` on stdin and validate it.

    Returns:
        int: the parsed value on success.
        bool: ``False`` when the input is not a valid integer.
    """
    try:
        # Note: unlike the old comment claimed, int() rejects float strings
        # like "3.5" — only whole-number text is accepted.
        n = int(input('n= '))
    except ValueError:
        # Bare except narrowed: only a parse failure means "not valid".
        print('Número no válido')
        return False
    else:
        return n
def Conv_Cifar10_32x64x64():
"""A 3 hidden layer convnet designed for 32x32 cifar10."""
base_model_fn = _cross_entropy_pool_loss([32, 64, 64],
jax.nn.relu,
num_classes=10)
datasets = image.cifar10_datasets(batch_size=128)
retu... | 12,888 |
def rotate(angle_list: List, delta: float) -> List:
"""Rotates a list of angles (wraps around at 2 pi)
Args:
angle_list (List): list of angles in pi radians
delta (float): amount to change in pi radians
Returns:
List: new angle list in pi radians
"""
new_angle_list = []
... | 12,889 |
def main():
"""
Parses command line arguments and invokes the appropriate method to respond to them
Returns
-------
None
"""
parser = argparse.ArgumentParser(
prog='harmony-gdal', description='Run the GDAL service'
)
harmony.setup_cli(parser)
args = parser.parse_args()... | 12,890 |
def global_average_pooling_3d(tensor: TorchTensorNCX) -> TorchTensorNCX:
"""
3D Global average pooling.
Calculate the average value per sample per channel of a tensor.
Args:
tensor: tensor with shape NCDHW
Returns:
a tensor of shape NC
"""
assert len(tensor.shape) == 5, 'm... | 12,891 |
def verbose_create():
    """
    Initiate detailed post creation process
    """
    # Prompts the user (get_input) and feeds the result straight to create().
    create(get_input())
def get_LAB_L_SVD_s(image):
"""Returns s (Singular values) SVD from L of LAB Image information
Args:
image: PIL Image or Numpy array
Returns:
vector of singular values
Example:
>>> from PIL import Image
>>> from ipfml.processing import transform
>>> img = Image.open('./im... | 12,893 |
def print_metrics(y_t, y_pred_t, mode=''):
"""
Print metrics of various kind
Parameters
----------
y_t :
y_pred_t :
mode : string
"""
print('Model performance on the {} dataset:'.format(mode))
# mse = mean_squared_error(y_t, y_pred_t)
# logloss = log_loss(y_t, y_pred_t)
... | 12,894 |
def path_inclusion_filter_fn(path, param, layer):
    """Returns whether or not layer name is contained in path.

    ``param`` is unused; the signature matches the filter-fn interface.
    """
    contained = layer in path
    return contained
def _parse_feature(line: PipelineRecord) -> Tuple[str, Coordinates, Feature]:
""" Creates a Feature from a line of output from a CSVReader """
contig = line[0]
coordinates = parse_coordinates(line[1])
feature = line[2]
# Piler-cr and BLAST both use 1-based indices, but Opfi uses 0-based indices.
... | 12,896 |
def average_summary_df_tasks(df, avg_columns):
""" Create averages of the summary df across tasks."""
new_df = []
# Columns to have after averaging
keep_cols = ["dataset", "method_name", "trial_number"]
subsetted = df.groupby(keep_cols)
for subset_indices, subset_df in subsetted:
return... | 12,897 |
def show_result(img, result, class_names, score_thr=0.3, out_file=None):
"""Visualize the detection results on the image.
Args:
img (str or np.ndarray): Image filename or loaded image.
result (tuple[list] or list): The detection result, can be either
(bbox, segm) or just bbox.
... | 12,898 |
def ml_variance(values, mean):
"""
Given a list of values assumed to come from a normal distribution and
their maximum likelihood estimate of the mean, compute the maximum
likelihood estimate of the distribution's variance of those values.
There are many libraries that do something like this, but th... | 12,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.