content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def convert_operand_kind(operand_tuple):
"""Returns the corresponding operand type used in spirv-tools for the given
operand kind and quantifier used in the JSON grammar.
Arguments:
- operand_tuple: a tuple of two elements:
- operand kind: used in the JSON grammar
- quantif... | 10,100 |
def setNumIterations(numIter):
"""
Sets the number of times an iterative constraint solver is repeated.
Increasing the number of iterations improves the constraint solver at the cost of performances & the speed of the game engine.
@param numIter: The number of timesubsteps. (Input 0 to suspend simulation numSub... | 10,101 |
def screen_missing_data(database,subject,begin=None,end=None):
""" Returns a DataFrame contanining the percentage (range [0,1]) of loss data
calculated based on the transitions of screen status. In general, if
screen_status(t) == screen_status(t+1), we declared we have at least one
missing point.
P... | 10,102 |
def check(text):
    """Flag line 1 of *text* with the example error, unconditionally.

    NOTE(review): the ``reverse(text)`` call's result is discarded — it is
    kept here verbatim; confirm whether it has an intended side effect.
    """
    reverse(text)
    return [(1, 1, "example.first", "First line always has an error.")]
def figure(**kwargs):
"""
Create a new figure with the given settings.
Settings like the current colormap, title or axis limits as stored in the
current figure. This function creates a new figure, restores the default
settings and applies any settings passed to the function as keyword
arguments... | 10,104 |
async def test_get_event_format_no_authorization(
client: _TestClient, mocker: MockFixture, event_format_interval_start: dict
) -> None:
"""Should return 401 Unauthorized."""
EVENT_ID = "event_id_1"
mocker.patch(
"event_service.adapters.event_format_adapter.EventFormatAdapter.get_event_format",
... | 10,105 |
def parse(content: str, target: str = "all") -> List[Inline]:
"""Parses an HTML document and extracts."""
soup = BeautifulSoup(content, "html.parser")
if target == "all":
search_queries = chain(*_VALID_TARGETS.values())
elif target in _VALID_TARGETS.keys():
search_queries = chain(_VALID... | 10,106 |
def array_ravel(arr):
    """Flatten a C- or F-ordered array into 1D, element order unspecified.

    Args
    ----
    arr: array

    Returns
    -------
    A flattened 1D array

    This is a stub: it always raises ``NotImplementedError`` (presumably a
    dispatch target implemented elsewhere — confirm before filling in).
    """
    raise NotImplementedError
def create_output(verified_specific_headers_list:list) -> str:
""" Design Output """
if args.verbose is True:
print("[!] INFO: Outputting Specific Header Information")
return_output = ""
for specific_header in verified_specific_headers_list:
split_header = specific_header.split(":")
... | 10,108 |
def registered_response_data():
    """Return the raw byte payload of the registered response."""
    payload = b"response data"
    return payload
def generate_pages(prefix='', **kwargs):
"""
This function creates a paginator and yields one page at a time.
:param prefix: the prefix (starting under the bucket) of the key name
:return: one page of contents
"""
bucket = kwargs.get('bucket', BUCKET_DEFAULT)
check_arg_bucket(bucket)
s... | 10,110 |
def parse_log(log_file):
"""
Parses a log file into a list of lists containing the messages logged
:param log_file: path-like: Path to the log file
:return: list of lists containing messages in the log file
"""
parsed_logs = [[] for i in range(5)]
with open(log_file, 'r') as f:
for l... | 10,111 |
def print_build_cmds(cliargs, params):
"""Generate bash-code with `docker build` commands."""
print("""set -e
set echo off
echo "minikan dockerization - Start"
""")
if "builder" in cliargs:
print(_build_builder(params))
if "all" in cliargs or "base" in cliargs:
print(_build_base(params... | 10,112 |
def print_mask_info(data):
"""
Print mask info, including locations of not null masks.
Args:
data (Xarray dataset): File containing masks.
Returns:
Printed statements of total number of times (3-hourly for CESM CAM files)
containing mcss.
"""
print("number of ma... | 10,113 |
def generate_experiment_fn(train_files,
eval_files,
num_epochs=None,
train_batch_size=40,
eval_batch_size=40,
embedding_size=8,
first_layer_size=100,
... | 10,114 |
def get_inclination_and_azimuth_from_locations(self, locations):
"""
self must to point to Main_InputWindow
"""
"""
Return "Inc" and "Azi" array objects in reference units.
"""
Inc = []
Azi = []
for MD in locations:
tangentVector = get_ASCT_from_MD(self, MD)
verticalVector = np.array([0.0,0.0,1.0,0.0])
i... | 10,115 |
def _ensure_package(base, *parts):
"""Ensure that all the components of a module directory path exist, and
contain a file __init__.py."""
bits = []
for bit in parts[:-1]:
bits.append(bit)
base.ensure(*(bits + ['__init__.py']))
return base.ensure(*parts) | 10,116 |
def make_cat_matrix(n_rows: int, n_cats: int) -> tm.CategoricalMatrix:
    """Build a random categorical matrix for the benchmarks.

    Draws ``n_rows`` category codes uniformly from ``0 .. n_cats - 1``.
    """
    codes = np.random.choice(np.arange(n_cats, dtype=int), n_rows)
    return tm.CategoricalMatrix(codes)
def add_stabilizer_nodes(boundaries_raw, electrodes, nr_nodes_between):
"""
Segmentation of nodes:
we have the existing nodes
N.F is the ratio of required nodes and existing nodes
first, add N nodes to each segment
then, add one more node to the F first segments
* assume ord... | 10,118 |
def test_find_by_username(session, client, jwt):
"""Assert that user find by username is working as expected."""
user = User.find_by_username(TEST_TOKEN['username'])
if not user:
User.create_from_jwt_token(TEST_TOKEN, 'PS12345')
user = User.find_by_username(TEST_TOKEN['username'])
asser... | 10,119 |
def adjust_learning_rate(optimizer, lr_init, epoch):
"""decrease the learning rate at 160 and 180 epoch ( from LDAM-DRW, NeurIPS19 )"""
lr = lr_init
if epoch < 5:
lr = (epoch + 1) * lr_init / 5
else:
if epoch >= 160:
lr /= 100
if epoch >= 180:
l... | 10,120 |
def read_data(creds):
"""Read court tracking data in and drop duplicate case numbers"""
# try:
df = gsheet.read_data(gsheet.open_sheet(gsheet.init_sheets(creds),"01_Community_lawyer_test_out_final","Frontend"))
# df.drop_duplicates("Case Number",inplace=True) #Do we want to drop duplicates???
retu... | 10,121 |
def _build_indie_lyrics(
root: str, num_workers: int = 8, max_size: int = 200000
) -> DocumentArray:
"""
Builds the indie lyrics dataset. Download the CSV files from:
https://www.kaggle.com/datasets/neisse/scrapped-lyrics-from-6-genres
:param root: the dataset root folder.
:param num_workers: th... | 10,122 |
def getHostname(request):
"""
Utility method for getting hostname of client.
"""
if request.getClientIP() in LOOPBACK_ADDRESSES and has_headers(request, X_FORWARDED_FOR):
# nginx typically returns ip addresses
addr = get_headers(request, X_FORWARDED_FOR)
if isIPAddress(addr):
... | 10,123 |
def CollapseSolutionPosition(x,x0):
"""
Calculate a free-fall collapse solution
x - position to calculate time at in cm
x0 - initial position in cm
Sam Geen, March 2018
"""
X = x/x0
t = (np.arccos(np.sqrt(X)) + np.sqrt(X * (1.0-X))) * x0**1.5 / np.sqrt(2.0*units.G*gravity.centralmass)
... | 10,124 |
def main():
"""RUN DECODING."""
parser = argparse.ArgumentParser()
# decode setting
parser.add_argument("--feats", required=True,
type=str, help="list or directory of aux feat files")
parser.add_argument("--checkpoint", required=True,
type=str, help="m... | 10,125 |
def scaleSpectralSky_cor(subframe, badpixelmask=None, maxshift=20, fitwidth=2, pord=1, nmed=3, dispaxis=0, spatial_index=None, refpix=None, tord=2):
"""
Use cross-correlation to subtract tilted sky backgrounds.
subframe : NumPy array
data subframe containing sky data to be subtracted (and,
perh... | 10,126 |
def login():
"""
Implements the login feature for the app.
Errors are shown if incorrect details are used. If the user tried
to access a page requiring login without being authenticated,
they are redirected there after sign in.
"""
if current_user.is_authenticated:
return redirect(u... | 10,127 |
def _get_plot_axes(grid):
"""Find which axes are being plotted.
Parameters
----------
grid : Grid
Returns
-------
tuple
"""
plot_axes = [0, 1, 2]
if np.unique(grid.nodes[:, 0]).size == 1:
plot_axes.remove(0)
if np.unique(grid.nodes[:, 1]).size == 1:
plot_ax... | 10,128 |
def make_parser(inheritable=False):
"""Make parser.
Parameters
----------
inheritable: bool
whether the parser can be inherited from (default False).
if True, sets ``add_help=False`` and ``conflict_hander='resolve'``
Returns
-------
parser: ArgumentParser
"""
parse... | 10,129 |
def _read_uint(addr):
    """Read an unsigned int at *addr* through gdb; return None on failure."""
    value = gdb.parse_and_eval("*(unsigned int*)0x%x" % addr)
    if value is not None:
        try:
            return _cast_uint(value)
        except gdb.MemoryError:
            # Fall through to the failure path below.
            pass
    print("Can't read 0x%x to lookup KASLR uint value" % addr)
    return None
def line_at_infinity(n):
    """Return the line at infinity: it consists exactly of the points at
    infinity for parameter *n*."""
    return points_at_infinity(n)
def check_load(work, varname, warning=lambda x: print(x)):
"""Check conditions on load"""
if not hasattr(work, "scholar_id"):
warning("[Warning] Work {} does not have scholar_id".format(varname))
if getattr(work, "place", None) is None:
warning("[Error] Work {} does not have place".format(va... | 10,132 |
def check_section(config:Namespace, name:str) -> Namespace:
"""Check that a section with the specified name is present."""
section = config._get(name)
if section is None:
raise ConfigurationError(f"Section {name} not found in configuration")
if not isinstance(section, Namespace):
raise ... | 10,133 |
def ceil(a):
    """The ceil function.

    Args:
        a (Union[:class:`~taichi.lang.expr.Expr`, :class:`~taichi.lang.matrix.Matrix`]): A number or a matrix.

    Returns:
        The least integer greater than or equal to `a`.
    """
    # Dispatches to the taichi-scope op (_ti_core.expr_ceil) or the
    # python-scope fallback (math.ceil) depending on the argument kind.
    return _unary_operation(_ti_core.expr_ceil, math.ceil, a)
def Body(
default: Any = Undefined,
*,
default_factory: Optional[NoArgAnyCallable] = None,
alias: str = None,
title: str = None,
description: str = None,
const: bool = None,
gt: float = None,
ge: float = None,
lt: float = None,
le: float = None,
multiple_of: float = None,... | 10,135 |
def process_name(row):
    """Add ``row['title']`` to the global ``name_set`` when it is non-empty
    and starts with an alphabetic character."""
    title = row['title']
    if title is None:
        return
    # Truthiness covers the empty-string case.
    if title and title[0].isalpha():
        name_set.add(title)
def square(array, x, y, size, mag):
"""For each diamond in the array, set the midpoint of that diamond to be
the average of the four corner points plus a random value. """
x1 = x - size
y1 = y - size
x2 = x + size
y2 = y + size
div = 4.0
l = len(array)
if x1 >= 0:
a = ar... | 10,137 |
def warp(img, pers_margin=425, margin_bottom=50, margin_top=450, margin_sides=150, reverse=False):
"""
This function warps an image. For the transformation a src polygon and a destination
polygon are used. The source polygon is calculated by the image shape and the margins
given. The destination polygon... | 10,138 |
def align(sx, sy):
""" Align two groups of sentences
:param sx:
:param sy:
:param sx:
:param sy:
"""
cx = map(char_length, sx)
cy = map(char_length, sy)
# noinspection PyTypeChecker
for (i1, i2), (j1, j2) in reversed(list(_align(cx, cy))):
yield ' '.join(sx[i1:i2]), ' '.j... | 10,139 |
def get_all_doorstations(hass):
    """Collect every configured doorstation across all config entries."""
    doorstations = []
    for entry in hass.data[DOMAIN].values():
        if DOOR_STATION in entry:
            doorstations.append(entry[DOOR_STATION])
    return doorstations
def test_runtime_config_attribute_cvejob_cpe2pkg_path():
"""Check the attributes handling for a class RuntimeConfig."""
old_value = unset_environment_variable('CVEJOB_CPE2PKG_PATH')
config = RuntimeConfig()
assert config._config.cpe2pkg_path == 'cpe2pkg.jar'
os.environ['CVEJOB_CPE2PKG_PATH'] = 'cp... | 10,141 |
def get_weather() -> dict:
"""Makes an api request for the weather api
country code queries the specific country
city name queries the specific city within that country
units determines the type of numerical data returned (centigrade or Fahrenheit)
:return: the response from the api
"""
que... | 10,142 |
def _create_off_value():
    """Return the scalar 'off' value as a float32 Tensor."""
    off = Tensor(0.0, mstype.float32)
    return off
def is_datetime(value):
    """Return True when *value* is a ``datetime.datetime`` instance.

    :param value: object to test
    :return: bool
    """
    return isinstance(value, datetime.datetime)
def sigmoid(z):
    """Logistic-style squashing: returns ``1 / (1 + exp(z))``.

    NOTE(review): this is sigmoid(-z), not the conventional
    ``1 / (1 + exp(-z))`` — callers appear to rely on the flipped sign,
    so it is preserved verbatim; confirm before "fixing".
    """
    # Large z would overflow math.exp; the true value there is ~0 anyway.
    return 0 if z > 100 else 1.0 / (1.0 + math.exp(z))
async def get_company_sumary(symbol: str, db: Session = Depends(get_db)):
"""
This method receibe a symbol, if does not exits in our database
go to extract data, save it on our database and retunr the
stored data
"""
company_solver = CompanySolver(company_symbol=symbol)
_ = company_solver.g... | 10,146 |
def make_dataloaders(params: MinkLocParams, debug=False):
"""
Create training and validation dataloaders that return groups of k=2 similar elements
:param train_params:
:param model_params:
:return:
"""
datasets = make_datasets(params, debug=debug)
dataloders = {}
train_sampler = Ba... | 10,147 |
def hilbert(n, x0, y0, xi, xj, yi, yj):
"""Generate a Hilbert curve.
This function returns a generator that yields the (x,y) coordinates
of the Hilbert curve points from 0 to 4^n-1.
Arguments:
n -- the base-4 logarithm of the number of points (ie. the function generates 4^n points).
x0, y... | 10,148 |
def show_images(image_one, image_two, image_three):
"""
Plots three images with label and shape attributes
"""
f, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(20,10))
ax1.set_title("%s with shape %s" % (image_one[1], image_one[0].shape))
ax1.imshow(image_one[0])
ax2.set_title("%s with shape... | 10,149 |
def get_attn_pad_mask(seq_q, seq_k):
"""
由于各句子长度不一样,故需要通过PAD将所有句子填充到指定长度;
故用于填充的PAD在句子中无任何含义,无需注意力关注;
注意力掩码函数,可用于屏蔽单词位置为PAD的位置,将注意力放在其他单词上。
:param seq_q: [batch_size, seq_len]
:param seq_k: [batch_size, seq_len]
"""
batch_size, len_q = seq_q.size()
_, len_k = seq_k.size()... | 10,150 |
def sync_gcp_projects(neo4j_session, projects, gcp_update_tag, common_job_parameters):
"""
Load a given list of GCP project data to Neo4j and clean up stale nodes.
:param neo4j_session: The Neo4j session
:param projects: List of GCP projects; output from crm.get_gcp_projects()
:param gcp_update_tag:... | 10,151 |
def convert_str_to_float(string):
    """Convert a value to float, mapping unparseable input to NaN.

    Handles edge cases such as non-numeric strings and ``None``.

    Args:
        string (str): value to convert (anything accepted by ``float``)

    Returns:
        f (float): parsed value, or ``np.nan`` when conversion fails
    """
    try:
        f = float(string)
    except (TypeError, ValueError):
        # Narrowed from a bare ``except Exception`` so unrelated bugs
        # (e.g. KeyboardInterrupt-adjacent errors) still surface.
        f = np.nan
    return f
def node_tree(node: str):
"""Format printing for locate"""
str2list = list(node.replace(' ', ''))
count = 0
for i, e in enumerate(str2list):
if e == '(':
count += 1
str2list[i] = '(\n{}'.format('| ' * count)
elif e == ')':
count -= 1
str2... | 10,153 |
def gen_sensor_summary_report(config_file, sensor, output_file=None):
"""
A function which generates a summary report for a given sensor. Report includes info such
as the file size, download times, number of scenes etc.
:param config_file: The EODataDown configuration file path.
:param sensor: The ... | 10,154 |
def _check_info_id_is_in_list(
expected_info_id: int, info_list: List[dict]) -> None:
"""
Check that the target info id is included in the list.
Parameters
----------
expected_info_id : int
The expected info id included in the list.
info_list : list of dicts
A list of ch... | 10,155 |
def random_init_checkpoint(param_name, is_weight, tar_size, checkpoint, args):
"""Either remove the final layer weights for fine-tuning on novel dataset
or append randomly initialized weights for the novel classes.
Note: The base detector for LVIS contains weights for all classes, but only
the weights ... | 10,156 |
def stream_changelog_sections(
target_filename, config_filename, receive_sections, version=None
):
"""Send individual changelog sections to a callable, one per version.
The callable accepts two arguments, the string version number of the
changelog section, and the markdown-formatted content of the chan... | 10,157 |
def process_data(data):
"""
:param datas:
:param args:
:return:
"""
# copy of the origin question_toks
for d in datas:
if 'origin_question_toks' not in d:
d['origin_question_toks'] = d['question_toks']
for entry in datas:
entry['question_toks'] = symbol_filt... | 10,158 |
def concat(
adatas: Union[Collection[AnnData], "typing.Mapping[str, AnnData]"],
*,
axis: Literal[0, 1] = 0,
join: Literal["inner", "outer"] = "inner",
merge: Union[StrategiesLiteral, Callable, None] = None,
uns_merge: Union[StrategiesLiteral, Callable, None] = None,
label: Optional[str] = No... | 10,159 |
def repeated_parity_data_binning(shots, nr_of_meas:int):
"""
Used for data binning of the repeated parity check experiment.
Assumes the data qubit is alternatively prepared in 0 and 1.
Args:
shots (1D array) : array containing all measured values of 1 qubit
nr_of_meas (int) : number of ... | 10,160 |
def get_user_activities(user_id, timestamp_start, timestamp_end):
""" Returns the activities for a user, between two times"""
activities = Activity.query \
.filter(Activity.user_id == user_id) \
.filter(Activity.timestamp_end >= timestamp_start) \
.filter(Activity.timestamp_start <= tim... | 10,161 |
def _disable(recipes):
"""Disable the given recipe in the link"""
for recipe in recipes:
with json_file(
config.recipe.profile.link_location(recipe)
) as values:
values["enabled"] = False
LOGGER.status("Disabling the link file of {} ({})".format(recipe, config... | 10,162 |
def create_feature_from_floor(train_df, test_df):
"""
Also the next important variables from EDA are floor and max_floor. So let us create two variables
1. Floor number of the house to the total number of floors
2. Number of floor from the top
"""
# floor of the house to the total ... | 10,163 |
def _find_matches(ref, pred):
""" find potential matches between objects in the reference and
predicted images. These need to have at least 1 pixel of overlap.
"""
matches = {}
for label in ref.labels:
mask = ref.labeled == label
matches[label] = [m for m in np.unique(pred.labeled[ma... | 10,164 |
def _process(config: ConfigType, should_make_dir: bool) -> ConfigType:
"""Process the config
Args:
config (ConfigType): Config object
should_make_dir (bool): Should make dir for saving logs, models etc
Returns:
[ConfigType]: Processed config
"""
config = _process_general_c... | 10,165 |
def calendar_heatmap_echarts(data_frame: pd.DataFrame, date_field: str = None, value_field: str = None,
title: str = "",
width: str = "100%", height: str = "300px") -> Echarts:
"""
日历热度图,显示日期热度
:param data_frame:
:param date_field: 日期列
:param... | 10,166 |
def adj_to_edge_indices(adj: Union[torch.Tensor, np.ndarray]) -> Union[torch.Tensor, np.ndarray]:
"""
Args:
adj: a (N, N) adjacency matrix, where N is the number of nodes
Returns:
A (2, E) array, edge_idxs, where E is the number of edges,
and edge_idxs[0], edge_idxs[1] are t... | 10,167 |
def test_git_url_top_level_url_versions(mock_packages, config):
"""Test URL fetch strategy inference when url is specified with git."""
pkg = spack.repo.get('git-url-top-level')
# leading 62 zeros of sha256 hash
leading_zeros = '0' * 62
fetcher = spack.fetch_strategy.for_package_version(pkg, '2.0... | 10,168 |
def create_export_settings_window():
"""
This function contains all the logic of the export settings window and will run the window by it's own.
:return: None
"""
window = sg.Window("Export Settings", generate_export_settings_layout(), modal=True, finalize=True,
keep... | 10,169 |
def render_string(s: str, *, args: Dict = None, as_json: bool = False) -> Iterator[str]:
    """Parse *s* into documents and lazily yield each rendered one.

    A falsy ``args`` (None or empty) is replaced by a fresh empty dict.
    """
    render_args = args or {}
    for doc_node in _parse_string(s):
        yield _render(doc_node, args=render_args, as_json=as_json)
def get_vertex_list(session, node_id, part_info):
"""Wrapper for HAPI_GetVertexList
Args:
session (int): The session of Houdini you are interacting with.
node_id (int): The node to get.
part_info (PartInfo): Part info of querying
Returns:
np.ndarray: Array of vertices
"... | 10,171 |
async def list(zeroconf=None, tdm_addr=None, tdm_port=None, password=None,
robot_id=None, robot_name=None,
timeout=5):
"""Display a list of all the robots.
Arguments:
tdm_addr - TDM address as a string (default: as in start())
tdm_port - TDM TCP port number (defaul... | 10,172 |
def lamb1(u,alpha=.5):
"""Approximate the Lambert W function.
Approximate the Lambert W function from its upper and lower bounds.
The parameter alpha (between 0 and 1) determines how close the
approximation is to the lower bound instead of the upper bound.
:arg float u: Modified argument o... | 10,173 |
def undeploy(c):
    """Remove the slacm package from the remote host(s) via pip3."""
    pkg_name = 'slacm'
    sudo(c, 'pip3 uninstall -y %s' % pkg_name)
def loglikelihood(time_steps: list) -> float:
"""Calculate the log-likelihood of the time steps from the estimation
Parameters
----------
time_steps : list
estimation time steps
Returns
-------
float
log-likelihood
"""
loglikelihood = 0
for time_step in time_st... | 10,175 |
def write_conv_msg(n, nMax, Error, fName, State, ConvCrit, totTime):
"""Write convergence status message at the end of conv. file."""
if State.upper() == "STEADY":
if n == nMax and Error > ConvCrit:
msg = write_steadyst_notconv_msg(nMax)
save_msg(msg, fName)
elif Er... | 10,176 |
def get_r0_rm_rp(s, i_delta):
""" compute 3 points r0, r_minus and r_plus to determine apsis
compute these at s.i-i_delta and s.i-2*i_delta
"""
xp = s.Xlast[:, s.i % s.save_last]
x0 = s.Xlast[:, (s.i - i_delta) % s.save_last]
xm = s.Xlast[:, (s.i - 2 * i_delta) % s.save_last]
r... | 10,177 |
def _add_missing_scheduler_data(
sys_map, parsed_map, timestamps, node, node_ip_mapping, ignore_exception
):
"""
Add missing IO scheduler details into parsed_map.
"""
if not sys_map or "scheduler" not in sys_map:
return
scheduler_map = {}
scheduler_map[node] = {}
scheduler_map... | 10,178 |
def make_atomic(last, **rows):
"""
Unify related table instances/row, including: ids, dir, and dfile
Parameters
----------
last : obspy.AttributeDict
{'keyvalue': lastid instance, ...}
rows : dict
{'canonical tablename': [list of row instances], ...}
These row instances ... | 10,179 |
def setup_preview(parent):
"""Creates preview window in the UI and connects a callback on the specified tab.
Args:
parent (App(QDialog)): Object corresponding to the parent UI element.
"""
tag = parent.tag
dlg = parent.dlg
btn_log_clear = getattr(dlg, f"btn_{tag}_log_clear", None)
t... | 10,180 |
def create_tf_example(image,
image_dir,
seg,
seg_dir):
"""Converts image and annotations to a tf.Example proto.
Args:
image: dict with keys: [u'license', u'file_name', u'coco_url', u'height',
u'width', u'date_captured', u'flickr_... | 10,181 |
def test_get_assigned_name_simple():
"""
Test `magic.get_assigned_name()` in various use cases.
"""
obj = type('', (), {})
foo = get_assigned_name(_getframe())
assert_equals("foo", foo)
spam = [get_assigned_name(_getframe())] + ["bar"]
assert_equals("spam", spam[0])
obj.eggs = (lambda: get_assigne... | 10,182 |
def build_rfb_lite(base, feature_layer, mbox, num_classes):
"""Receptive Field Block Net for Accurate and Fast Object Detection for embeded system
See: https://arxiv.org/pdf/1711.07767.pdf for more details.
"""
base_, extras_, norm_, head_ = add_extras(base(), feature_layer, mbox, num_classes, versi... | 10,183 |
def bfunsmat(u, p, U):
"""Computes a matrix of the form :math:`B_{ij}`, where
:math:`i=0\\ldots p` and for each :math:`j` th column the
row :math:`i` of the matrix corresponds to the value of
:math:`(\\mathrm{span}(u_j)-p+i)` th bspline basis function at
:math:`u_j`.
Parameters:
u (np.a... | 10,184 |
def make_directory(path: str):
    """Create the parent directory of *path* if it does not already exist.

    Args:
        path: file path whose containing directory should be created.
    """
    save_directory = os.path.dirname(path)
    # A bare filename has an empty dirname — nothing to create (the original
    # would have crashed on makedirs('')). exist_ok avoids the race between
    # an existence check and the creation.
    if save_directory:
        os.makedirs(save_directory, exist_ok=True)
def plot_clustermap(foldchanges, pvalues, threshold=0.05, row_cluster=True, dendogram=True, file_path=None):
"""Simple function to plot clustermap of foldchanges and pwms (without annotation); function will filter foldchanges whenever pvalues are significant.
Args:
foldchanges (np.array): matrix of fol... | 10,186 |
def getFourgram(words, join_string):
"""
Input: a list of words, e.g., ['I', 'am', 'Denny', 'boy']
Output: a list of trigram, e.g., ['I_am_Denny_boy']
I use _ as join_string for this example.
"""
assert type(words) == list
L = len(words)
if L > 3:
lst = []
... | 10,187 |
def get_age_carbon_14_dating(carbon_14_ratio):
"""Returns the estimated age of the sample in year.
carbon_14_ratio: the percent (0 < percent < 1) of carbon-14
in the sample conpared to the amount in living
tissue (unitless). """
if isinstance(carbon_14_ratio, str):
raise TypeError("Please ... | 10,188 |
def test_units_callnumbers_star_imports():
"""Star imports for the ``units.callnumbers`` package should work
without raising errors."""
from context import pycallnumber
all_imp = __import__('pycallnumber.units.callnumbers', globals(),
locals(), ['*'])
assert all_imp.LC
a... | 10,189 |
def process_factor(seqlet_H, seqlet_W, seqlet_dna, feature_mask, out_prefix, background_fasta, align_seqlets_shift, meme_db=None):
"""Perform all analyses on one factor."""
print(out_prefix)
# write coef vector
write_factor(seqlet_H, feature_mask, '%s_coef.txt' % out_prefix)
# plot logo
plot_l... | 10,190 |
def test_get_default_config_location_default(monkeypatch):
    """Assert that, if no file is found, the most-specific location is returned."""
    locations = ['1', '2']
    monkeypatch.setattr(chromaterm.__main__, 'CONFIG_LOCATIONS', locations)
    result = chromaterm.__main__.get_default_config_location()
    assert result == '1.yml'
def get_input_definition() -> InputDefinition:
"""
Query ReconAll's input file definition (*t1_files*) to check for existing
runs.
Returns
-------
InputDefinition
ReconAll's *t1_files* input definition
"""
node = get_node()
return node.analysis_version.input_definitions.get(... | 10,192 |
def launch(workflow: str, number: int, file: str, concurrency: int):
"""Launch multiple workflows."""
results_folder_path = _build_results_folder_path(workflow)
try:
os.mkdir(results_folder_path)
except FileExistsError:
logging.info(
"Benchmark folder already exists. Will ov... | 10,193 |
def create_ou_process(action_spec, ou_stddev, ou_damping):
"""Create nested zero-mean Ornstein-Uhlenbeck processes.
The temporal update equation is:
.. code-block:: python
x_next = (1 - damping) * x + N(0, std_dev)
Note: if ``action_spec`` is nested, the returned nested OUProcess will not be... | 10,194 |
def clean_data(df):
    """Return a copy of *df* with duplicate rows removed.

    parameters:
        df(Dataframe): data frame
    """
    return df.drop_duplicates()
def IssueFactory(data, journal_id, issue_order):
"""
Realiza o registro fascículo utilizando o opac schema.
Esta função pode lançar a exceção `models.Journal.DoesNotExist`.
"""
mongo_connect()
metadata = data["metadata"]
issue = models.Issue()
issue._id = issue.iid = data.get("id")
... | 10,196 |
def delete_images(ec2_conn, ids=None, tags=None, owners=None, name=None):
"""Delete (unregister) AMI images."""
images = list_images(
ec2_conn, ids=ids, tags=tags, owners=owners, name=name
)
if not images:
if ids:
raise exc.NotFoundError('No image id {} found.'.format(ids))
... | 10,197 |
def _is_permission_in_db(permission_name: str):
"""To check whether the given permission is in the DB
Parameters
----------
permission_name: str
A permission name we use internally.
E.g., hazard, hazard:hazard, project...
"""
return bool(
models.Auth0Permission.query.fil... | 10,198 |
def check(source):
"""Return messages from pyflakes."""
if sys.version_info[0] == 2 and isinstance(source, unicode):
# Convert back to original byte string encoding, otherwise pyflakes
# call to compile() will complain. See PEP 263. This only affects
# Python 2.
try:
... | 10,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.