content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def test_initialize_with_displacements_and_force_sets_input(
generate_workchain,
generate_structure,
generate_displacements,
generate_settings,
generate_force_sets,
):
"""Test of PhonopyWorkChain.initialize() using NaCl data.
`displacements` (random displacements) is given as an input.
... | 6,500 |
def rasterize(
vectors,
layer=0,
output=None,
nodata=None,
pixel_size=None,
bounds=None,
affine=None,
shape=None,
attribute=None,
fill=0,
default_value=1,
):
"""Rasterize features
Options for definining the boundary and pixel size of rasterization:
User may prov... | 6,501 |
def blockchain_key_seed(request):
""" Private key template for the nodes in the private blockchain, allows
different keys to be used for each test to avoid collisions.
"""
# Using the test name as part of the template to force the keys to be
# different accross tests, otherwise the data directories ... | 6,502 |
def wls_simple(X, y, yerr):
"""
weighted least squares: (X.T*W*X)*beta = X.T*W*y
solution: beta = (X.T*X)^-1 * X.T *y
Note
----
wls solves single problems (n_problems=1)
BUT! is able to solve multiple-template (same error) problems
Parameters
----------
X: predictors
(n... | 6,503 |
def decorated1(debugger, args, exe_ctx, result, dict):
    """Python command defined by @lldb.command.

    Writes a greeting to the command's result object; all other
    arguments are part of the lldb command signature and are unused.
    """
    message = "hello from decorated1"
    print(message, file=result)
def assert_metadata_equal(this, other):
    """Assert metadata `this` are equal to metadata `other`.

    Compares the four metadata fields in order and raises AssertionError
    on the first mismatch.
    """
    for field in ("standard_name", "long_name", "var_name", "units"):
        assert getattr(this, field) == getattr(other, field)
def _filter(dict_packages, expression):
"""Filter the dict_packages with expression.
Returns:
dict(rst): Filtered dict with that matches the expression.
"""
expression_list = ['(' + item + ')' for item in expression.split(',')]
expression_str = '|'.join(expression_list)
compiled_exp = ... | 6,506 |
def multi_index_tsv_to_dataframe(filepath, sep="\t", header_rows=None):
"""
Loads a multi-header tsv file into a :py:class:`pd.DataFrame`.
Parameters
----------
filepath : `str`
Path pointing to the tsv file.
sep : `str`, optional, default: '\t'
Character to use as the delim... | 6,507 |
def generate_summoner_tab_summoner(db, profile, ss):
"""
:type db: darkarisulolstats.lolstats.database.Database
"""
summoner = {}
for s in ss:
raw_summoner = db.summoners.get(s)
if "profileIconPath" not in summoner:
summoner["profileIconPath"] = data.DataDragon.get_profil... | 6,508 |
def enable_ini():
    """
    Switch seapy to use all fields from ROMS hydrodynamics and COBALT ini fields.

    Calls the module-level enable() first, then merges ``ini_fields`` into
    ``seapy.roms.fields`` — note this mutates the shared seapy dict in place.
    """
    enable()
    seapy.roms.fields.update(ini_fields)
def preserve_quotes(s):
    """Strip the HTML tags wrapped around greentext.

    Applies the module-level ``quot_pattern`` regex to ``s``, replacing each
    match via ``get_first_group``.
    """
    cleaned = quot_pattern.sub(get_first_group, s)
    return cleaned
def test_init_logger(monkeypatch):
"""
Tests `init_logger()`.
"""
test_config_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'test_config')
def mock_get_conf_path():
"""
Replaces the conf path with the one for mock confs in unit tests.
"""
... | 6,511 |
def with_whitespace_inside(expr):
    """Return an expression that allows whitespace inside, but not outside,
    the expression: one or more ``expr`` occurrences optionally separated by
    single spaces, combined into one token.
    """
    spaced = expr | White(' ', max=1) + expr
    return Combine(OneOrMore(spaced))
def save_plot(workdir, filename, create_workdir=False):
"""Accepts workdir specified as argument to main script and filename
- saves plot to disk."""
if create_workdir:
if workdir == os.getcwd():
os.makedirs(f'{workdir}/figures', exist_ok=True)
plt.savefig(f'{workdir}/figures... | 6,513 |
def similarity(item, user, sim_dict):
    """
    Look up the similarity between an item and a user (a set of items).

    Returns the precomputed score from ``sim_dict``; a missing user or a
    missing item scores 0.
    """
    if user in sim_dict and item in sim_dict[user]:
        return sim_dict[user][item]
    return 0
def get_totalt_result(req_url):
    """Return the total number of hits for the given query URL.

    Sends a GET request (using the module-level ``headers``) and extracts
    ``result.totalHits`` from the JSON response.

    Args:
        req_url (str): The request query URL that decides the data.

    Returns:
        The ``totalHits`` value reported by the API.
    """
    response = requests.get(req_url, headers=headers)
    # local renamed from `json`, which shadowed the stdlib json module
    payload = response.json()
    return payload['result']['totalHits']
def lorentz(x, FWHM, x0=0):
    """
    Return the Lorentzian lineshape evaluated at ``x``, centered at ``x0``
    with full width at half maximum ``FWHM``.
    """
    half_width = FWHM / 2
    return half_width / np.pi / ((x - x0) ** 2 + half_width ** 2)
def _test_create_pd_detection(infos, tracking=False):
"""Creates a prediction objects file."""
assert tracking is False, "Not Supported Yet"
from waymo_open_dataset import dataset_pb2
from waymo_open_dataset import label_pb2
from waymo_open_dataset.protos import metrics_pb2
from waymo_open_data... | 6,517 |
def equal_axes(axes, xlim=True, ylim=True):
"""
adjust xlim and ylim to the min/ max of all axes
Parameters
----------
axes: list
axes to adjust
xlim : bool, optional
If true, adjust xlim.
ylim : bool, optional
If true, adjust ylim.
"""
axes = np.array(axes)... | 6,518 |
def count_collision(strMap: list[str], right: int, down: int) -> int:
"""Read the map and count how many tree would be encountered if someone start from the top left corner"""
mapWidth = len(strMap[0]) # All lines are assumed to have same width
xCoord, yCoord = right % mapWidth, down
count = 0
whi... | 6,519 |
def svn_config_find_group(*args):
    """svn_config_find_group(svn_config_t cfg, char key, char master_section, apr_pool_t pool) -> char"""
    # SWIG-generated thin wrapper: forwards all arguments verbatim to the
    # native libsvn binding. Presumably returns the name of the config group
    # containing `key` (per the Subversion C API) — behavior is defined by
    # _core, not here.
    return _core.svn_config_find_group(*args)
def _reduce_ticks(fig):
"""Reduce number of ticks by factor 1.5 if more than 4."""
# TODO: replace this by mpl built-in class
tick_reduc = 1.5
for axes in fig.get_axes():
if len(axes.get_xticks()) > 4:
axes.locator_params(
tight=False,
axis='x',
... | 6,521 |
def loading():
    """Render an endless spinner animation on stdout.

    Pulls frames from ``spinning_cursor()`` and redraws one character every
    0.1 s until the caller interrupts the loop (e.g. KeyboardInterrupt).
    """
    spinner = spinning_cursor()
    while True:
        # next(spinner) works on any iterator in Python 3; the original
        # spinner.next() method call only existed in Python 2.
        sys.stdout.write(next(spinner))
        sys.stdout.flush()
        time.sleep(0.1)
        sys.stdout.write('\b')  # backspace so the next frame overwrites
def download_file(url: str, target_dir: Union[str, Path]) -> None:
""" """
target_path = Path(target_dir)
r = requests.get(url, stream=True)
http_status = r.status_code
if http_status != 200:
raise Exception("Request was not successfull. Got response {0}".format(http_status))
total = ... | 6,523 |
def run_delay():
"""Run the updater to call a function delayed."""
import time
import threading
from qtpy import QtWidgets
from qt_thread_updater import get_updater
app = QtWidgets.QApplication.instance() or QtWidgets.QApplication([])
text_edit = QtWidgets.QTextEdit()
text_edit.resize(... | 6,524 |
def _database_exists():
"""Checks for existence of database"""
_require_environment()
database = _get_database_name()
with settings(hide('warnings'), warn_only=True):
result = run(MYSQL_PREFIX % "\"SHOW DATABASES LIKE '%(NAME)s';\"" % database)
if database['NAME'] in result:
... | 6,525 |
def EncoderDecoder(d_model, d_ff, n_heads, dropout, layer_idx, mode,
ff_activation):
"""Transformer encoder-decoder layer.
The input is a triple (decoder_input, mask, encoder) where the mask is
created from the original source to prevent attending to the padding part
of the encoder.
Args:... | 6,526 |
def group_normalize(strokes):
""" normilize a multistroke drawing """
long_stroke = concat(strokes)
x_min = min(long_stroke.x)
x_max = max(long_stroke.x)
y_min = min(long_stroke.y)
y_max = max(long_stroke.y)
x_range = float(x_max-x_min)
y_range = float(y_max-y_min)
normalized_stroke... | 6,527 |
def _get_sa_bracket(myimt, saset):
"""
For a given SA IMT, look through the input SAs and return a tuple of
a) a pair of IMT strings representing the periods bracketing the given
period; or c) the single IMT representing the first or last period in
the input list if the given period is off the end o... | 6,528 |
def test_ceil_special_cases_one_arg_equal(arg1):
    """
    Special case test for `ceil(x, /)`:
    - If `x_i` is already integer-valued, the result is `x_i`.
    """
    res = ceil(arg1)
    # Only compare elements that are already integer-valued; on those,
    # ceil must behave as the identity.
    mask = isintegral(arg1)
    assert_exactly_equal(res[mask], (arg1)[mask])
def fb83(A, B, eta=1., nu=None):
"""
Generates the FB8 distribution using the orthogonal vectors A and B
where A = gamma1*kappa and B = gamma2*beta (gamma3 is inferred)
A may have not have length zero but may be arbitrarily close to zero
B may have length zero however. If so, then an arbitrary value... | 6,530 |
def replicate(pta, ptac, p0, coefficients=False):
"""Create a replicated residuals conditioned on the data.
Here pta is standard marginalized-likelihood PTA, and
ptac is a hierarchical-likelihood version of pta with
coefficients=True for all GPs. This function:
- calls utils.get_coefficients(pta, p... | 6,531 |
def screenshot_progressbar_horizontal(screenshot_on=True, theme='flatly'):
"""
Get screenshot for horizontal progressbars
"""
style = Style(theme)
window = style.master
window.title('ttkbootstrap')
window.geometry('600x400')
ss = Screenshot(window, '../images/progressbar_horizontal.png')... | 6,532 |
def load_danube() -> pd.DataFrame:
"""
The danube dataset contains ranks of base flow observations from the Global River Discharge
project of the Oak Ridge National Laboratory Distributed Active Archive Center (ORNL DAAC),
a NASA data center. The measurements are monthly average flow rate for two statio... | 6,533 |
def get_index(grid_mids, values):
"""get the index of a value in an array
Args:
grid_mids: array of grid centers
value: array of values
Returns:
indices
"""
diff = np.diff(grid_mids)
diff = np.concatenate((diff, diff[-1:]))
edges = np.concatenate((grid_mids-d... | 6,534 |
def validate_incoming_edges(graphs, param=None):
"""
In case a node of a certain type has more then a threshold of incoming
edges determine a possible stitches as a bad stitch.
"""
param = param or {}
res = {}
i = 0
for candidate in graphs:
res[i] = 'ok'
for node, values ... | 6,535 |
def test_calculate_part_stress_lambda_b_no_insulation():
    """calculate_part_stress_lambda_b() should raise an KeyError when passed an unknown
    insulation ID."""
    # 41 is an insulation ID that is not in the lookup table, so the
    # calculation is expected to fail with a KeyError.
    with pytest.raises(KeyError):
        inductor.calculate_part_stress_lambda_b(1, 41, 85.77)
def face_area(bounding_box, correction):
"""
Increase face area, to square format, face detectors are very close
clipping useless when you want to get whole head
Arguments: bounding box original, correction value
Returns: 4-element list - bounding box for expanded area (ints)
"""
x_1, y_1, ... | 6,537 |
def session_scope():
"""Provide a transactional scope around a series of operations."""
global ENGINE
global SESSION
if SESSION is None:
SESSION = sessionmaker(bind=ENGINE)
session = SESSION()
try:
yield session
except:
session.rollback()
raise
... | 6,538 |
def ut_to_dt(ut):
    """Convert a universal time in days to a dynamical time in days."""
    # As at July 2020: TAI is 37 s ahead of UTC and TDT is 32.184 s ahead
    # of TAI, hence the total offset of 69.184 s.
    offset_days = 69.184 / SEC_IN_DAY
    return ut + offset_days
def _read_group_h5(filename: Path, groupname: str) -> ndarray:
"""Return group content.
Args:
filename: path of hdf5 file.
groupname: name of group to read.
Returns:
content of group.
"""
try:
with h5py.File(filename, 'r') as h5f:
data = h5f[groupname][()... | 6,540 |
def test_attach_file(request, browser_name):
"""Should provide a way to change file field value"""
browser = get_browser(browser_name)
request.addfinalizer(browser.quit)
file_path = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "mockfile.txt"
)
browser.visit(EXAMPLE_APP)
... | 6,541 |
def check_genotype_data():
"""Checks if the genotype data is fully indexed"""
GENE_COUNT_TO_CHECK = 33341
SNP_COUNT_TO_CHECK = 10700998
gene_count = es.count('genotype',doc_type='genes')['count']
snps_count = es.count('genotype',doc_type='snps')['count']
if gene_count != GENE_COUNT_TO_CHECK:
... | 6,542 |
def _clip_bbox(min_y, min_x, max_y, max_x):
"""Clip bounding box coordinates between 0 and 1.
Args:
min_y: Normalized bbox coordinate of type float between 0 and 1.
min_x: Normalized bbox coordinate of type float between 0 and 1.
max_y: Normalized bbox coordinate of type float between 0 and 1.
max_... | 6,543 |
def hot(df, hot_maps, drop_cold=True, ret_hots_only=False, verbose=False):
"""
df: pd.DataFrame
hot_maps: list(dict)
hot_map: dict
key: str column in df
value: one_hot vector for unique row value
---
returns dataframe
"""
if verbose:
print(f"hot_df c... | 6,544 |
def menu(function_text):
    """
    Decorator for a plain-text handler.

    Wraps ``function_text`` so that ``self.text_menu(bot, update)`` runs
    before the handler itself.

    :param function_text: handler method set as handle in the bot class
    :return: the wrapped handler
    """
    from functools import wraps

    # wraps() preserves the handler's __name__/__doc__ so the decorated
    # method still introspects (and logs) as the original handler.
    @wraps(function_text)
    def wrapper(self, bot, update):
        self.text_menu(bot, update)
        function_text(self, bot, update)
    return wrapper
def create_delete_classes(system_id_or_identifier, **kwargs):
"""Create classes for a classification system.
:param system_id_or_identifier: The id or identifier of a classification system
"""
if request.method == "DELETE":
data.delete_classes(system_id_or_identifier)
return {'message'... | 6,546 |
def ajax_upload_key():
"""Ajax upload a functionary key. Key files are stored to the db in their
dictionary representation. """
functionary_key = request.files.get("functionary_key", None)
functionary_name = request.form.get("functionary_name", None)
if not functionary_name:
flash("Something went wrong: ... | 6,547 |
def test_reward_valid(env_name, reward_type, tmpdir):
"""Test output of reward function is appropriate shape and type."""
venv = util.make_vec_env(env_name, n_envs=1, parallel=False)
venv, tmppath = _make_env_and_save_reward_net(env_name, reward_type, tmpdir)
TRAJECTORY_LEN = 10
obs = _sample(venv.... | 6,548 |
def add_latents_to_dataset_using_tensors(args, sess, tensors, data):
""" Get latent representations from model.
Args:
args: Arguments from parser in train_grocerystore.py.
sess: Tensorflow session.
tensors: Tensors used for extracting latent representations.
data: Data used... | 6,549 |
def normal_distribution_parameter_estimation(data):
"""
Notice: Unbiased Estimation Adopted. Line 115.
:param data: a list, each element is a real number, the value of some attribute
eg: [0.46, 0.376, 0.264, 0.318, 0.215, 0.237, 0.149, 0.211]
:return miu: the estimation of miu of the normal... | 6,550 |
def map_icd_codes_to_categories(df, icd_version):
"""Append a column 'category' to df containing disease categories"""
# default label
df["category"] = OTHER_LABEL
# From ICD6 on we have numerical-only four-digit codes, categorization works
# on 3-digit codes only. Drop the last digit before left-p... | 6,551 |
def reverse_complement(seq):
"""
ARGS:
seq : sequence with _only_ A, T, C or G (case sensitive)
RETURN:
rcSeq : reverse complement of sequenced passed to it.
DESCRIPTION:
DEBUG:
Compared several sequences. Is working.
FUTURE:
"""
rcSeq = "" # Re... | 6,552 |
def compose(chosung, joongsung, jongsung=u''):
"""This function returns a Hangul letter by composing the specified chosung, joongsung, and jongsung.
@param chosung
@param joongsung
@param jongsung the terminal Hangul letter. This is optional if you do not need a jongsung."""
if jongsung is None... | 6,553 |
def generate_markdown_metadata(metadata_obj: Dict[str, str]) -> List[str]:
"""generate_markdown_metadata
Add some basic metadata to the top of the file
in HTML tags.
"""
metadata: List[str] = ["<!---"]
passed_metadata: List[str] = [
f" {key}: {value}" for key, value in metadata_obj... | 6,554 |
def fftshift(x: np.ndarray):
    """Shift an FFT spectrum.

    The default FFT output is not symmetric about zero frequency; this swaps
    the left and right halves of the spectrum.

    :Parameters:
        - x: spectrum sequence
    :Returns: the shifted spectrum
    """
    half = x.size // 2
    return np.append(x[half:], x[:half])
def store_feature_vectors(dfs, output_dir):
"""
Write out all feature vector information to a csv file, to be read
later by the feature vector plotting script.
Parameters
----------
dfs : dict of DataFrame
Time series data for multiple sub-image locations.
output_dir : str
P... | 6,556 |
def get_20newsgroups_data(
train_test,
categories=None,
max_text_len: int = None,
min_num_tokens=0,
random_state=42,
) -> List[Tuple[str, str]]:
"""
'alt.atheism',
'comp.graphics',
'comp.os.ms-windows.misc',
'comp.sys.ibm.pc.hardware',
'comp.sys.mac.hardware',
'comp... | 6,557 |
def crossdomain(allowed_origins=None, methods=None, headers=None,
max_age=21600, attach_to_all=True,
automatic_options=True, credentials=False):
"""
http://flask.pocoo.org/snippets/56/
"""
if methods is not None:
methods = ', '.join(sorted(x.upper() for x in metho... | 6,558 |
def describe_my_user_profile():
"""
Describes a user\'s SSH information.
See also: AWS API Documentation
:example: response = client.describe_my_user_profile()
:rtype: dict
ReturnsResponse Syntax{
'UserProfile': {
'IamUserArn': 'string',
'Name': 'string',
... | 6,559 |
def get_gaussian_xyz(lines, optimized=True):
"""
Input orientation:
---------------------------------------------------------------------
Center Atomic Atomic Coordinates (Angstroms)
Number Number Type X Y Z
---------------... | 6,560 |
def make_vector_gradient(bcs: Boundaries) -> Callable:
""" make a discretized vector gradient operator for a cylindrical grid
|Description_cylindrical|
Args:
bcs (:class:`~pde.grids.boundaries.axes.Boundaries`):
|Arg_boundary_conditions|
Returns:
A function tha... | 6,561 |
def exec_in_terminal(command):
    """Run a command in the terminal and return its captured output,
    stripped of surrounding whitespace and decoded as UTF-8.

    Args:
        command: a string or list of strings
    """
    raw = check_output(command)
    return raw.strip().decode("utf8")
def replace_string(original, start, end, replacement):
    """Replace the [start, end) range of |original| with |replacement|
    and return the new string."""
    prefix = original[:start]
    suffix = original[end:]
    return prefix + replacement + suffix
def edit_mod():
""" Admin endpoint used for sub transfers. """
if not current_user.is_admin():
abort(403)
form = EditModForm()
try:
sub = Sub.get(fn.Lower(Sub.name) == form.sub.data.lower())
except Sub.DoesNotExist:
return jsonify(status='error', error=[_("Sub does not exist... | 6,564 |
def get_current_info(symbol_list, columns='*'):
"""Retrieves the latest data (15 minute delay) for the
provided symbols."""
columns = ','.join(columns)
symbols = __format_symbol_list(symbol_list)
yql = ('select %s from %s where symbol in (%s)'
% (columns, FINANCE_TABLES['quotes'], symbo... | 6,565 |
def reavail_fulfillment_lines(fulfillment):
"""Return fulfilled skills to corresponding availabilitys."""
for line in fulfillment:
if line.task_line.variant and line.task_line.variant.track_inventory:
increase_availability(
line.task_line.variant, line.quantity, allocate=True... | 6,566 |
def dictionarify_recpat_data(recpat_data):
"""
Covert a list of flat dictionaries (single-record dicts) into a dictionary.
If the given data structure is already a dictionary, it is left unchanged.
"""
return {track_id[0]: patterns[0] for track_id, patterns in \
[zip(*item.items()) for ... | 6,567 |
def copy_assemble_template(files, distfolder, headersize, configfile, mainfile, examplefile):
"""
Copia y ensambla el template.
:param files: Lista de archivos
:param distfolder: Carpeta de distribución
:param headersize: Tamaño del header
:param configfile: Archivo de configs
:para... | 6,568 |
def _get_qualified_name(workflow_name, job_name):
    """Construct a qualified name by joining the workflow name and job name
    with the module-level delimiter."""
    return _NAME_DELIMITER.join((workflow_name, job_name))
def normalize_each_time_frame(input_array):
"""
Normalize each time frame
- Input: 3D numpy array
- Output: 3D numpy array
"""
for i in range(input_array.shape[0]):
max_value = np.amax(input_array[i, :, :])
if max_value != 0:
input_array[i, :, :] = input_arr... | 6,570 |
def describe_raid_arrays(InstanceId=None, StackId=None, RaidArrayIds=None):
"""
Describe an instance\'s RAID arrays.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_raid_arrays(
InstanceId='string',
StackId='string',
RaidArrayIds=[
... | 6,571 |
def pre_process_data(full_data):
    """
    Pre-process the data: dump invalid values.

    Rows whose "Temp" column is -10 or below are treated as invalid and
    dropped; a filtered copy is returned.
    """
    valid_rows = full_data["Temp"] > -10
    return full_data[valid_rows]
def invalid_grant(_):
    """Handles the Invalid Grant error when doing Oauth.

    Drops the stale OAuth token cached on the 'google' blueprint, flashes a
    "danger" notice to the user, and redirects back to the index page. The
    positional argument (the error object) is ignored.
    """
    del current_app.blueprints['google'].token
    flash(("InvalidGrant Error"), category="danger")
    return redirect(url_for('index'))
def diff_text(a, b):
"""
Performs a diffing algorithm on two pieces of text. Returns
a string of HTML containing the content of both texts with
<span> tags inserted indicating where the differences are.
"""
def tokenise(text):
"""
Tokenises a string by spliting it into individual... | 6,574 |
def require_reset_password():
"""
请求重设密码
参数:
{
"identifier":"用户识别符"
}
返回:
{
"code":0,//非0表示调用成功
"message":"qwq"//code非0的时候表示错误信息
}
"""
if config.USE_PHONE_WHEN_REGISTER_AND_RESETPASSWD:
return make_response... | 6,575 |
def encrypt(key, src_file_path, encrypted_file_path):
"""
Encrypts the specified source file to the target path using AES and the
specified RSA key
:param key: an RSA key
:param src_file_path: str path of file to be encrypted
:param encrypted_file_path: str path of target encrypted file
:ret... | 6,576 |
def coerce_affine(affine, *, ndim, name=None):
"""Coerce a user input into an affine transform object.
If the input is already an affine transform object, that same object is returned
with a name change if the given name is not None. If the input is None, an identity
affine transform object of the give... | 6,577 |
def unpack(url, sha256, compression, unpack_location='.'):
"""Fetch a remote archive, check its hash and decompress it
Download the file ``url``, ensure its hash matches the ``sha256`` argument,
then decompress it using ``compression`` method (either 'tar' or 'zip').
The unpacked files will be written ... | 6,578 |
def provider_filtered_machines(request, provider_uuid,
identity_uuid, request_user=None):
"""
Return all filtered machines. Uses the most common,
default filtering method.
"""
identity = Identity.objects.filter(uuid=identity_uuid)
if not identity:
raise Obj... | 6,579 |
def build_pin_dict(fp, filepath):
""" build a dictionary with pins and their aliases for one pic
and print the dictionary
"""
dom = parse(filepath) # load .pic file
pinlist = {} # new dictionary
i = 1 ... | 6,580 |
def known_peaks():
"""Return a list of Peak instances with data (identified)."""
peak1 = Peak(
name="Test1Known",
r_time=5.00,
mz=867.1391,
charge="+",
inchi_key="IRPOHFRNKHKIQA-UHFFFAOYSA-N",
)
peak2 = Peak(
name="Test2Known",
r_time=8.00,
... | 6,581 |
def explode_sheet_music(sheet_music):
"""
Splits unformatted sheet music into formated lines of LINE_LEN_LIM
and such and returns a list of such lines
"""
split_music = sheet_music.split(',')
split_music = list(map(lambda note: note+',', split_music))
split_list = []
counter = 0
line... | 6,582 |
def norm_coefficient(m, n):
"""
Calculate the normalization coefficient for the (m, n) Zernike mode.
Parameters
----------
m : int
m-th azimuthal Zernike index
n : int
n-th radial Zernike index
Returns
-------
norm_coeff : float
Noll normalization coefficien... | 6,583 |
def get_waveglow(ckpt_url):
"""
Init WaveGlow vocoder model with weights.
Used to generate realistic audio from mel-spectrogram.
"""
wn_config = {
'n_layers': hp.wg_n_layers,
'n_channels': hp.wg_n_channels,
'kernel_size': hp.wg_kernel_size
}
audio_config = {
... | 6,584 |
def user_exists(keystone, user):
    """Return True if a keystone user with the given name already exists."""
    existing_names = [entry.name for entry in keystone.users.list()]
    return user in existing_names
def gen_cartesian_product(*args: List[Dict]) -> List[Dict]:
""" generate cartesian product for lists
生成笛卡尔积,估计是参数化用的
Args:
args (list of list): lists to be generated with cartesian product
Returns:
list: cartesian product in list
Examples:
>>> arg1 = [{"a": 1}, {"a": ... | 6,586 |
def nxclass_handler(validator, v_item):
"""validate @NX_class"""
nx_class = utils.decode_byte_string(v_item.h5_object)
nxdl = validator.manager.classes.get(nx_class)
if nxdl is None:
c = "not a recognized NXDL class: " + nx_class
status = finding.ERROR
elif isBaseClassNXDL(nxdl):
... | 6,587 |
def get_234_df(x):
"""
This function get the dataframe for model2.1,2.2,2.3
input: x, the col we want
output: the dataframe only for x
"""
styles = pd.read_csv("styles.csv", error_bad_lines=False)
styles = styles.drop(["productDisplayName"],axis = 1)
styles = styles.drop(["year"],axis = 1)
sty... | 6,588 |
def appropriate_bond_orders(params, smrts_mol, smrts):
"""Checks if a SMARTS substring specification has appropriate bond orders
given the user-specified mode.
:param params: A dictionary of the user parameters and filters.
:type params: dict
:param smrts_mol: RDKit mol object of the SMARTS string.... | 6,589 |
def open_images_folder():
    """
    Opens the `images` directory in the native file explorer, if supported.

    Creates the directory (under base_dir()) when it does not yet exist.
    NOTE(review): uses the `open` command, which presumably makes this
    macOS-only — confirm before relying on it on other platforms.
    """
    images_folder = base_dir() + '/images'
    if not os.path.exists(images_folder):
        os.makedirs(images_folder)
    subprocess.call(["open", images_folder])
def train(model, train_path, val_path, steps_per_epoch, batch_size,
records_path):
"""
Train the Keras graph model
Parameters:
model (keras Model): The Model defined in build_model
train_path (str): Path to training data
val_path (str): Path to validation data
steps... | 6,591 |
def verify_single_host(host, ip):
"""
Simple function to verify only a single host returned from query.
If no hosts, or multiple hosts are returned, an error message is printed
and the program exits.
"""
if len(host) == 0:
print("Error: No host with IP address {} was found".format... | 6,592 |
def Mcnu_to_m1m2(Mc, nu):
    """Convert chirp mass, symmetric mass ratio pair to m1, m2.

    Routes through the mass ratio q and total mass M using the sibling
    conversion helpers; the return value is whatever Mq_to_m1m2 produces
    (presumably the tuple (m1, m2) — defined elsewhere in this module).
    """
    q = nu_to_q(nu)
    M = Mcq_to_M(Mc, q)
    return Mq_to_m1m2(M, q)
def _AccumulateActions(args):
"""Given program arguments, determines what actions we want to run.
Returns [(ResultsReportCtor, str)], where ResultsReportCtor can construct a
ResultsReport, and the str is the file extension for the given report.
"""
results = []
# The order of these is arbitrary.
if args.... | 6,594 |
def test_profile_rate_attr(mock_board) -> None:
"""Side effects of changing the rate profile via its attribute."""
profile = Profile(board=mock_board)
assert profile.rate == 1
assert profile._state == Profile.SyncedState.UNFETCHED
assert profile._profile_tracker == (1, 1)
profile.rate = 2
as... | 6,595 |
def build_predictions_dictionary(data, class_label_map):
"""Builds a predictions dictionary from predictions data in CSV file.
Args:
data: Pandas DataFrame with the predictions data for a single image.
class_label_map: Class labelmap from string label name to an integer.
Returns:
Dictionary with key... | 6,596 |
def create_file_link(link_id, file_id, parent_share_id, parent_datastore_id):
"""
DB wrapper to create a link between a file and a datastore or a share
Takes care of "degenerated" tree structures (e.g a child has two parents)
In addition checks if the link already exists, as this is a crucial part of ... | 6,597 |
def deptree(lines):
"""Build a tree of what step depends on what other step(s).
Test input becomes
{'A': set(['C']), 'C': set([]), 'B': set(['A']),
'E': set(['B', 'D', 'F']), 'D': set(['A']),
'F': set(['C'])}
A depends on C
B depends on A
C depends on nothing (starting point)
D ... | 6,598 |
def get_datetime(time_str, model="0"):
    """
    Parse a timestamp string into a ``datetime`` object.

    When ``model`` is "0", ``time_str`` is first normalized via ``get_time``
    (e.g. '20200120.110227' becomes '2020-01-20 11:02:27'); otherwise
    ``time_str`` must already be in '%Y-%m-%d %H:%M:%S' form.

    Args:
        time_str: the timestamp string to parse.
        model: "0" to pre-normalize with get_time, anything else to parse
            time_str as-is.

    Returns:
        datetime.datetime: the parsed timestamp.
    """
    if model == "0":
        time_str = get_time(time_str)
    # local renamed from `time`, which shadowed the `time` module name
    parsed = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S")
    return parsed
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.