content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def _get_client(app):
"""Returns a client instance for an App.
If the App already has a client associated with it, simply returns
it. Otherwise creates a new client, and adds it to the App before
returning it.
Args:
app: A Firebase App instance (or ``None`` to use the default App).
Re... | 9,000 |
def get_job_config_build_for_branch(**kwargs):
    """Build a commit-triggered copr_build JobConfig for branch "build-branch".

    Any keyword arguments are forwarded verbatim to the JobConfig
    constructor; passing a key that is already set here raises the usual
    duplicate-keyword TypeError.
    """
    base = {
        "type": JobType.copr_build,
        "trigger": JobConfigTriggerType.commit,
        "branch": "build-branch",
        "scratch": True,
    }
    return JobConfig(**base, **kwargs)
def get_connection_string_from_config_file(cfg_src, db_cfg_key):
"""
Gets connection parameters from specified section in
a configuration file.
"""
# reading complete configuration
with open(cfg_src, 'r') as yml_file:
cfg = yaml.safe_load(yml_file)
# looking for specified connection... | 9,002 |
def _decomp_0_matrices(
kak: 'cirq.KakDecomposition',
atol: float = 1e-8,
) -> Tuple[Sequence[Tuple[np.ndarray, np.ndarray]], complex]:
"""Returns the single-qubit matrices for the 0-SQRT_ISWAP decomposition.
Assumes canonical x, y, z and (x, y, z) = (0, 0, 0) within tolerance.
"""
# Pairs of s... | 9,003 |
def repo_crc32c():
"""
Link crc32c library.
"""
maybe(
http_archive,
name = "com_google_crc32c", # 1.1.2
build_file = "@toolbase//third_party/crc32c:crc32c.BUILD",
sha256 = "a40244a21b9ea50808b214e127e53500f3ef50defca2db2f7125cf95008431bd",
strip_prefix = "crc32... | 9,004 |
def test_ranking(preds, target):
""" test that ranking function works as expected """
for p, t in zip(preds, target):
scipy_ranking = [rankdata(p.numpy()), rankdata(t.numpy())]
tm_ranking = [_rank_data(p), _rank_data(t)]
assert (torch.tensor(scipy_ranking[0]) == tm_ranking[0]).all()
... | 9,005 |
def test_get_parent_process(requests_mock):
"""
Given - connection id and ptid to get its parent.
When -
Running get_parent_process function.
Then -
The process parent should be returned.
"""
api_raw_response = util_load_json('test_files/get_parent_process.json')
requests_... | 9,006 |
def concave(x, m):
    """Concave shape function.

    Multiplies sin terms over the first len(x) - m components, then, for
    m != 1, one cos term for the transition component, and clamps the
    product into [0, 1] via correct_to_01.
    """
    assert shape_args_ok(x, m)
    product = 1.0
    for j in range(len(x) - m):
        product *= math.sin(x[j] * math.pi / 2.0)
    if m != 1:
        product *= math.cos(x[len(x) - m] * math.pi / 2.0)
    return correct_to_01(product)
def get_filenames(is_training, data_dir):
"""Return filenames for dataset."""
if is_training:
return [
os.path.join(data_dir, 'train-%05d-of-01024' % i)
for i in range(_NUM_TRAIN_FILES)]
else:
return [
os.path.join(data_dir, 'validation-%05d-of-00128' % i)... | 9,008 |
def norm_img(img):
"""
normalization image
:param img: (C, H, W)
:return:
norm_img: (C, H, W)
"""
height, width, channel = img.shape
img = np.reshape(img, (height * width, channel)) # (height * width, channel)
mean = np.mean(img, axis=0, keepdims=True) # (1, channel)
center... | 9,009 |
def test_positional_only_and_arg_invalid_calls(a, b, /, c):
"""
>>> test_positional_only_and_arg_invalid_calls(1, 2, 3)
>>> test_positional_only_and_arg_invalid_calls(1, 2, c=3)
>>> test_positional_only_and_arg_invalid_calls(1, 2) # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeErro... | 9,010 |
def propagator_radial_diffusion(n,dim_rad,rate,wrad,lagtime,
lmax,bessel0_zeros,bessels,):
"""calculate propagator for radial diffusion as matrix exponential
n -- dim_trans, dimension transition matrix, usually number of bins in z-direction
dim_rad -- dimension transition matrix, always equal... | 9,011 |
def convert_examples_to_features(examples, use_label):
"""Loads a data file into a list of `InputBatch`s."""
features = []
line_tags = []
for (ex_index, example) in enumerate(examples):
if use_label:
labels = example.labels
else:
labels = ['O'] * len(example.uni... | 9,012 |
def broadcast_to_rank(t, rank, axis = -1):
    """Expand tf.Tensor `t` with size-1 dimensions at `axis` until its rank is `rank`.

    Assumes the static (compile-time) rank of `t` is known; no-op when the
    tensor already has the requested rank.
    """
    missing_dims = rank - t.shape.rank  # static rank must be available here
    for _ in range(missing_dims):
        t = tf.expand_dims(t, axis=axis)
    return t
def operator(func):
    """Decorator that rewrites `func(stream, *args, **kwargs)` into a factory.

    Calling the decorated function with `*args`/`**kwargs` does not run
    `func`; it returns a one-argument callable that, given a stream,
    applies `func` to that stream with the captured arguments.
    """
    @wraps(func)
    def bound(*args, **kwargs):
        # keep the closure's name "operator", matching the original contract
        def operator(stream):
            return func(stream, *args, **kwargs)
        return operator
    return bound
def json(body, charset="utf-8", **kwargs):
    """Takes JSON formatted data, converting it into native Python objects"""
    decoded = text(body, charset=charset)
    return json_converter.loads(decoded)
def download_edictos(
data_dir=f"{os.environ['HOME']}/data/corteconstitucional/edictos",
):
"""
needs to be run several times, some times it claims that it cannot find downloaded pdfs,
:param data_dir:
:return:
"""
url = "https://www.corteconstitucional.gov.co/secretaria/edictos/"
downlo... | 9,016 |
def read_corpus(file_path, encoding=ENCODING, **kwargs):
"""
Create a Linguistica object with a corpus data file.
:param file_path: path of input corpus file
:param encoding: encoding of the file at *file_path*. Default: ``'utf8'``
:param kwargs: keyword arguments for parameters and their values.
... | 9,017 |
def sample_pagerank(corpus, damping_factor, n):
"""
Return PageRank values for each page by sampling `n` pages
according to transition model, starting with a page at random.
Return a dictionary where keys are page names, and values are
their estimated PageRank value (a value between 0 and 1). All
... | 9,018 |
def parse_args() -> argparse.Namespace:
"""
Parse program arguments
:return: Parser values
"""
parser = argparse.ArgumentParser(description="")
parser.add_argument("-a", action="store_true")
parser.add_argument("-c", action="store_true")
parser.add_argument("-x", action="store_true")
... | 9,019 |
def compute_participants(matches, challonge_data):
"""Compute series participants.
Iterate all matches and players to create a graph.
Apply connected components algorithm to resolve distinct
participant groups over all matches.
Sort participant groups by number of wins to correlate
with Challo... | 9,020 |
def write_haiku(word_array, is_ipv6):
"""Return the beautiful haiku"""
# String to place in schema to show word slot.
octct = 'OCTET'
schema = get_schema(is_ipv6, octct)
# Replace each instance of 'octet' in the schema with a word from
# the encoded word array.
for i in range(len(word_array... | 9,021 |
def import_data():
"""
Utility function to imoprt summary tsv ready for usage in PyMol
"""
col_types = {
'sift_score': float, 'sift_median': float, 'total_energy': float,
'interaction_energy': float, 'diff_interaction_energy': float,
'diff_interface_residues': float, 'freq': floa... | 9,022 |
def test_b_traversal_from_neighborless_node_gets_one_node_list(full_graph_1):
    """Test that traversing from a neighborless node gets a one-node list."""
    assert full_graph_1.breadth_first_traversal(21) == [21]
def get_current_datetime():
    """
    Get the current datetime in the application's configured timezone.
    Note: This function is intended to be mocked in testing
    Return:
        time(datetime.datetime): current datetime
    """
    # current_app.config['TIMEZONE'] is presumably a tzinfo instance — TODO confirm
    return datetime.datetime.now(current_app.config['TIMEZONE'])
def launch_task(task):
"""
Use this function to launch a task, by passing a common.Task
instance
Arguments:
- task <common.Task>
"""
content_type = ContentType.objects.get_for_model(task._meta.model)
celery_id = _launch_task.delay(content_type.id, task.id)
task.queue_id = celery... | 9,025 |
def _delete_vsx_interface_vlan_v1(vlan_id, **kwargs):
"""
Perform PUT calls on a VLAN interface to remove VSX IPv4 settings
:param vlan_id: Numeric ID of VLAN to that will be configured
:param kwargs:
keyword s: requests.session object with loaded cookie jar
keyword url: URL in main() fu... | 9,026 |
def adapt_all(iterable, to_cls):
    """
    Adapt every item of `iterable` to `to_cls` and return the results in a list.
    A `None` iterable produces an empty list.
    """
    if iterable is None:
        return []
    adapted = []
    for item in iterable:
        adapted.append(adapt(item, to_cls))
    return adapted
def get_config(seed, shot):
"""
Uses a given base 1-shot config to replicate it for 'shot' and 'seed'.
Changes dataset training split, cfg.OUTPUT_DIR and iteration number and steps accordingly.
"""
base_config_path: str = args.base_config
assert '1shot' in base_config_path
dataset_... | 9,028 |
def copy_code(outdir):
"""Copies files to the outdir to store complete script with each experiment"""
code = []
exclude = set([])
for root, _, files in os.walk("./code", topdown=True):
for f in files:
if not f.endswith('.py'):
continue
code += [(root,f)]
... | 9,029 |
def log_sum_exp_vb(vec, m_size):
"""
calculate log of exp sum
args:
vec (batch_size, vanishing_dim, hidden_dim) : input tensor
m_size : hidden_dim
return:
batch_size, hidden_dim
"""
_, idx = torch.max(vec, 1) # B * 1 * M
max_score = torch.gather(vec, 1, idx.view(-1, 1, m_size)).view(-1, 1, m_size) # B *... | 9,030 |
def C_fun_gen(fractions, speciesindices, y, time):
"""
Calculate the distribution of carbon functional groups as a percent of
total carbon.
Parameters
----------
fractions : list
The lumped phases that you want to include (as specified
in MW['speci... | 9,031 |
def get_branch(repo):
    """ Retrieve the current branch of a dulwich repository

    Follows HEAD and returns the first refname other than b"HEAD",
    decoded via to_utf8. Falls through and returns None implicitly
    when no such refname exists (e.g. detached HEAD — TODO confirm
    against dulwich's refs.follow contract).
    """
    refnames, sha = repo.refs.follow(b"HEAD")
    if len(refnames) != 2:
        # Unexpected chain length; logged for diagnosis, not fatal.
        LOGGER.debug("Got more than two refnames for HEAD!")
    for ref in refnames:
        if ref != b"HEAD":
            return to_utf8(ref)
def test_empty_collection():
    """Test empty collection"""
    with AnkiEmpty() as a:
        # Fresh collection: no cards, and exactly the 5 default note models
        assert a.col.cardCount() == 0
        assert len(a.model_names) == 5
def print_grid(grid_in):
"""
Prints the elements of grid_in with a space between each element along the
columns and a new line for each row. Assumes that grid_in is a list of
lists.
"""
#iterate through grid
for row in grid_in:
for elem in row:
# print element followed by... | 9,034 |
def _sigmoid(x):
"""
Sigmoid function that smoothly limits values between 0.0 and 1.0
:param x: Numpy array with float values that are to be limited.
:return: Numpy array with float values between 0.0 and 1.0
"""
return 1.0 / (1.0 + np.exp(-x)) | 9,035 |
def inherit_n_genes_prob(n, n_father, n_mother, mutation_prob) -> Dict:
"""Returns dictionary with distribution of conditional probability of
inherited genes given that father has n_father genes and mother has
n_mother genes, taking into account probability of mutations."""
# Probabily distributions:
... | 9,036 |
def battle(player, npc):
"""
:param player: Monster
:param npc: Player
:return: None
"""
for item in inventory.inv.equipment.values():
if item.item_type == 'Weapon':
weapon_attack = item.attack
break
else:
weapon_attack = 0
for item in inventory.in... | 9,037 |
async def get_metrics_address_counts_summary():
"""
Latest summary of address counts.
"""
qry = f"""
select col
, latest
, diff_1d
, diff_1w
, diff_4w
, diff_6m
, diff_1y
from mtr.address_counts_by_minimal_balance_ch... | 9,038 |
async def processor(db, document: Dict[str, Any]) -> Dict[str, Any]:
"""
Process a history document before it is returned to the client.
:param db: the application object
:param document: the document to process
:return: the processed document
"""
return await apply_transforms(
virt... | 9,039 |
def _unit_scale_traindata(X, xmins, xmaxs):
"""If xmax > xmin, unit-scale the training data, else do nothing
Parameters
----------
x : ndarray of shape (m, n)
xmins : ndarray of shape (n, )
xmaxs : ndarray of shape (n, )
Returns
-------
result : ndarray of shape (m, n)
Notes... | 9,040 |
def script() -> None:
"""Function for running LADiM as a command line application"""
parser = argparse.ArgumentParser(
description="LADiM 2.0 — Lagrangian Advection and Diffusion Model"
)
parser.add_argument(
"-d",
"--debug",
help="Show debug information",
action... | 9,041 |
def get_port_properties(port):
"""Retrieves common port properties from its package.sh file.
Returns:
dict: keys are values from PORT_PROPERTIES, values are from the package.sh file
"""
props = {}
for prop in PORT_PROPERTIES:
res = subprocess.run(f"cd {port}; exec ./package.sh show... | 9,042 |
def test_compute_reproject_roi_issue647():
"""In some scenarios non-overlapping geoboxes will result in non-empty
`roi_dst` even though `roi_src` is empty.
Test this case separately.
"""
src = GeoBox(
10980, 10980, Affine(10, 0, 300000, 0, -10, 5900020), CRS("epsg:32756")
)
dst = ... | 9,043 |
def run_HDBSCAN_subclustering(df=None, target=None, cluster_col="Cluster", soft_clustering=True,
min_cluster_size=100, min_samples=10,
cluster_selection_epsilon=0.0, cluster_selection_method='eom',
draw_condensed_tree=True, core_dist_n_jobs=None):
"""An implement of H... | 9,044 |
def extractive_explanations(
dataset,
prefix='explain sentiment',
input_feature='review',
output_classes=('negative', 'positive'),
drop_explanations=False
):
"""Preprocessor to handle extractive rationale prediction datasets.
The preprocessor expects a dataset with the provided 'input_featu... | 9,045 |
async def test_signal_repetitions_cancelling(hass, monkeypatch):
"""Cancel outstanding repetitions when state changed."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {
"platform": "rflink",
"devices": {"protocol_0_0": {"name": "test", "signal_repetitions": 3}},... | 9,046 |
def download_eval_public(dataset_folder):
""" Download the public eval part of desed dataset from Zenodo.
Args:
dataset_folder: str, the path to the root of the dataset where to download the evaluation files (this folder
contains audio and metadata folders).
Returns:
"""
creat... | 9,047 |
def _extractFile(configuration, zipFile, zipPath, zipInfo, targetDir, absTargetDir, onlyNewer):
"""Extract the ZipInfo object to a physical file at targetDir.
"""
engine = configuration.engine
targetFile = os.path.join(targetDir, zipInfo.filename)
absTargetFile = os.path.join(absTargetDir, zipInfo.filename)
... | 9,048 |
def corr_weighted_kendalltau(top_list_prev, top_list, use_fast=True):
"""Compute weighted Kendall's Tau correlation (based on custom implementation!).
NOTE: Lists are DataFrame columns AND they must be sorted according to their value!!!"""
# it is irrelevant whether we compute kendall for ranks or scores.
... | 9,049 |
def test_Order_PUT_request(app):
"""with app.test_client() as client:
response = client.put(
'/orders/3',
data=json.dumps(dict(
status='Pedido Concluído',
)),
content_type='application/json',
)
data = json.loads(response.data.d... | 9,050 |
def generate_processes_by_exposure(exposure):
""" Creates a simulated process based on an exposure.
Arguments:
exposure {object} -- Exposure model
Raises:
ValueError -- returns when there is no processing
with a respective exposure.
Returns:
object -- Proce... | 9,051 |
def cross_val_confusion(classifier, X, y, cv=None):
"""
Evaluate confusion matrix and score from each fold of cross validation
Parameters:
----------
classifier: classifier object
The object used to fit the data.
X[ndarray]: shape=(n_sample, n_feature)
y[ndarray]: shape=(n_sample,)... | 9,052 |
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Unload Synology DSM sensors."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, platform)
for platform in PLATFORMS
]
... | 9,053 |
def transfer_segm_labels(verts_before, mesh, dir_path, name):
"""
Save segmentation labels for mesh after scan imitation
"""
verts_after = utils.get_vertices_np(mesh)
verts_mapping = utils.match_vert_lists(verts_after, verts_before)
# print(os.path.join(dir_path, name + '_sim_segmentation.t... | 9,054 |
def confirm_control_contains(trestle_dir: pathlib.Path, control_id: str, part_label: str, seek_str: str) -> bool:
"""Confirm the text is present in the control markdown in the correct part."""
control_dir = trestle_dir / ssp_name / control_id.split('-')[0]
md_file = control_dir / f'{control_id}.md'
res... | 9,055 |
def describe_current_subtask(subtask, prefix=True):
"""
Make a 'natural' language description of subtask name
"""
to_verb = {"AnswerQuestion": "answering a question",
"ArmGoal": "moving my arm",
"DemoPresentation": "giving a demo",
"Find": "finding",
... | 9,056 |
def setup_base_repo(ctx, config):
"""
Setup repo based on redhat nodes
redhat:
base-repo-url: base url that provides Mon, OSD, Tools etc
installer-repo-url: Installer url that provides Agent, Installer
deb-repo-url: debian repo url
deb-gpg-key: gpg key used for signing the build
... | 9,057 |
def addprint(x: int, y: int):
    """Return an "added" representation of `x` and `y` with their sum."""
    total = x + y
    return f"base addprint(x={x!r}, y={y!r}): {total!r}"
def parse_date(deadline_date):
    """
    Split a date of the form MM/DD/YY or MM/DD/YYYY (slash- or
    dash-separated) and return the integers MM, DD, YYYY (or YY)
    in that order.
    """
    parts = re.split(r"[/-]", deadline_date)
    return int(parts[0]), int(parts[1]), int(parts[2])
def print_distribution(distribution, out=sys.stdout):
""" Prints distribution data
"""
total = reduce(lambda x, y: x + y,
[group[1] for group in distribution])
output = ""
for group in distribution:
output += " %s: %.2f%% (%d instance%s)\n" % ( \
group[0],
... | 9,060 |
def jyfm_tools_position_fund_direction(
trade_date="2020-02-24", indicator="期货品种资金流向排名", headers=""
):
"""
交易法门-工具-资金分析-资金流向
https://www.jiaoyifamen.com/tools/position/fund/?day=2020-01-08
:param trade_date: 指定交易日
:type trade_date: str
:param indicator: "期货品种资金流向排名" or "期货主力合约资金流向排名"
:ty... | 9,061 |
def CreateConferenceRoomRunConfig(run_id):
"""Creates and stores a conference room run config entity.
Args:
run_id: str unique id to be associated with this run config.
"""
config_value = {'run_id': run_id,
'run_start_time': datetime.datetime.now()}
config_value_str = pickle.dumps(confi... | 9,062 |
def generate_points_in_areas(gdf, values, points_per_unit=1, seed=None):
"""
Create a GeoSeries of random points in polygons.
Parameters
----------
gdf : GeoDataFrame
The areas in which to create points
values : str or Series
The [possibly scaled] number of points to create in each area
points_per_unit : nu... | 9,063 |
def _log_parameter_search_results_as_artifact(cv_results_df, run_id):
"""
Records a collection of parameter search results as an MLflow artifact
for the specified run.
:param cv_results_df: A Pandas DataFrame containing the results of a parameter search
training session, which... | 9,064 |
def validate_kata():
"""
Validate Kata
"""
wait_for_installation()
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "nginx-kata.yaml")
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("", "default", "running", label="app=kata")
ku... | 9,065 |
def montager(xi, col=None, row=None, aspect=1.4, transpose=False, isRGB=False,
flipx=False, flipy=False, flipz=False, output_grid_size=False):
""" tile a 3D or 4D image into a single 2D montage
Parameters
----------
xi : ndarray
image data to montage
col : int, optional
... | 9,066 |
def template_dict(input_dict_arg, params_dict_arg):
"""function to enable templating a dictionary"""
output_dict = input_dict_arg
for key, value in output_dict.items():
if isinstance(value, str):
output_dict[key] = params_re_str(value, params_dict_arg)
elif isinstance(value, dict... | 9,067 |
def test_publishlib_name_from_metadata_problem(store_mock, config):
"""The metadata wasn't there to get the name."""
args = Namespace(library="charms.testcharm.v0.testlib")
with patch("charmcraft.commands.store.get_name_from_metadata") as mock:
mock.return_value = None
with pytest.raises(Cra... | 9,068 |
def _traverse_dictionaries(instance, parent="spin_systems"):
"""Parses through the instance object contained within the parent object and return
a list of attributes that are populated.
Args:
instance: An instance object from the parent object.
parent: a string object used to create the add... | 9,069 |
def getSingleChildTextByName(rootNode, name):
"""Returns the text of a child node found by name.
Only one such named child is expected.
"""
try:
nodeList = [e.firstChild.data for e in rootNode.childNodes if e.localName == name]
if len(nodeList) > 0:
return nodeList[0]
... | 9,070 |
def test_aggregate_stores_output_in_record(configured_test_manager):
"""An aggregate output should exist in the record state."""
@aggregate(["output"])
def small_aggregate(record, records):
return "hello world"
record = Record(configured_test_manager, None)
small_aggregate(record, [record]... | 9,071 |
def _get_energy_ratio_single_wd_bin_bootstrapping(
df_binned,
df_freq,
N=1,
percentiles=[5.0, 95.0],
return_detailed_output=False,
):
"""Get the energy ratio for one particular wind direction bin and
an array of wind speed bins. This function also includes bootstrapping
functionality by ... | 9,072 |
def save_tasks(
grid,
year,
temporal_range,
frequency,
output,
products,
dataset_filter,
env,
complevel,
overwrite=False,
tiles=None,
debug=False,
gqa=None,
):
"""
Prepare tasks for processing (query db).
<todo more help goes here>
\b
Not yet imp... | 9,073 |
def get_file_name(part):
    """Extract the file name (text after =', up to the last -word group) from a fragment ID."""
    matches = re.findall(r"='(.*\-[a-z]+).*", part)
    return matches[0]
def main():
"""Main entrance for training"""
args = parser.parse_args()
print(sys.argv)
#context.set_context(mode=context.GRAPH_MODE)
context.set_context(mode=context.PYNATIVE_MODE)
if args.GPU:
context.set_context(device_target='GPU')
# parse model argument
assert args.model.... | 9,075 |
def get_helping_materials(project_id, limit=100, offset=0, last_id=None):
"""Return a list of helping materials for a given project ID.
:param project_id: PYBOSSA Project ID
:type project_id: integer
:param limit: Number of returned items, default 100
:type limit: integer
:param offset: Offset ... | 9,076 |
def generate_sphere_points(n):
"""
Returns list of 3d coordinates of points on a sphere using the
Golden Section Spiral algorithm.
"""
points = []
inc = math.pi * (3 - math.sqrt(5))
offset = 2 / float(n)
for k in range(int(n)):
y = k * offset - 1 + (offset / 2)
r = math.s... | 9,077 |
def svn_repos_dir_delta2(*args):
"""
svn_repos_dir_delta2(svn_fs_root_t src_root, char src_parent_dir, char src_entry,
svn_fs_root_t tgt_root, char tgt_path,
svn_delta_editor_t editor, void edit_baton,
svn_repos_authz_func_t authz_read_func, svn_boolean_t text_deltas,
svn_depth... | 9,078 |
def log_error(e):
    """
    Print any errors.

    :param e: error/exception object (or any printable value) to report on stdout
    """
    print(e)
def linear_r2_points(points: np.ndarray, coef: tuple, r2: R2 = R2.classic) -> float:
"""Computes the coefficient of determination (R2).
Args:
points (np.ndarray): numpy array with the points (x, y)
coef (tuple): the coefficients from the linear fit
r2 (R2): select the type of coefficien... | 9,080 |
def _show_tournament_list() -> List:
"""
Функция возвращает список предстоящих турниров
"""
tournaments = []
for tournament in loop.run_until_complete(get_request('https://codeforces.com/api/contest.list?gym=false')):
if tournament['phase'] != 'BEFORE':
break
tournaments.append(tournament)
for tournament i... | 9,081 |
def init_tof_1(xshut):
    """XSHUT port HIGH enables the device."""
    # Configure the XSHUT pin as an output, then drive it HIGH to bring
    # the time-of-flight sensor out of shutdown.
    rpi_gpio.setup_output(xshut)
    rpi_gpio.write_output(xshut, 1)
def less_goals_scored():
    """
    Return the lowest number of goals scored during one week.

    Delegates to goals_scored with the 'min' aggregation mode.
    """
    return goals_scored('min')
def compute_avgpool_output_shape(input_shape:Sequence[Union[int, None]],
kernel_size:Union[Sequence[int], int]=1,
stride:Union[Sequence[int], int]=1,
padding:Union[Sequence[int], int]=0,
c... | 9,084 |
def notebuild():
"""
build tool
"""
args = command_line_parser()
package = PackageBuild()
if args.command == 'pull':
package.git_pull()
elif args.command == 'push':
package.git_push()
elif args.command == 'install':
package.git_install()
elif args.command == '... | 9,085 |
def _obs_intersect(((x0, y0), (x1, y1)), ((x2, y2), (x3, y3))):
"""Check if two lines intersect. The boundaries don't count as
intersection."""
base1 = (x0, y0)
base2 = (x2, y2)
dir1 = (x1-x0, y1-y0)
dir2 = (x3-x2, y3-y2)
t1, t2 = _intersect(base1, dir1, base2, dir2)
eps = 0.00001
... | 9,086 |
def project_configure(request, project_name):
"""
get configuration
:param request: request object
:param project_name: project name
:return: json
"""
# get configuration
if request.method == 'GET':
project = Project.objects.get(name=project_name)
project = model_to_dict(... | 9,087 |
def run_epoch(session, model, eval_op=None, verbose=False):
"""Runs the model on the given data."""
start_time = time.time()
costs = 0.0
iters = 0
state = session.run(model.initial_state)
fetches = {
"cost": model.cost,
"final_state": model.final_state,
}
if eval_op is not None:
fetches["eval_op"] = ev... | 9,088 |
def unemployment(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-res... | 9,089 |
def print_dict(d):
    """Print a dictionary, one "key:value" line per entry, in insertion order."""
    for key, value in d.items():
        line = key + ':' + str(value)
        print(line)
def test_crd_nof_shots(crd_file):
    """Return number of shots."""
    # crd_file fixture yields (header_dict, _, _, file_name) — middle items unused here
    hdr, _, _, fname = crd_file
    crd = CRDReader(Path(fname))
    # Parsed shot count must match the header's "nofShots" field
    assert crd.nof_shots == hdr["nofShots"]
def splitunc(p):
"""Deprecated since Python 3.1. Please use splitdrive() instead;
it now handles UNC paths.
Split a pathname into UNC mount point and relative path specifiers.
Return a 2-tuple (unc, rest); either part may be empty.
If unc is not empty, it has the form '//host/mount' (or si... | 9,092 |
def expand_locations(ctx, input, targets = []):
"""Expand location templates.
Expands all `$(execpath ...)`, `$(rootpath ...)` and deprecated `$(location ...)` templates in the
given string by replacing with the expanded path. Expansion only works for labels that point to direct dependencies
of this ru... | 9,093 |
def scrape_db(test=False, write_file=True):
"""
Function to scrape bodybuild.com recipe database and save results as json.
Parameters:
-----------
"""
# Hacky way to get all recipes - you have to request the number. Luckily,
# this is listed at the beginning of any result you pull from DB... | 9,094 |
def require(required):
""" Decorator for checking the required values in state.
It checks the required attributes in the passed state and stop when
any of those is missing. """
def decorator(function):
@functools.wraps(function)
def wrapper(*args, **kwargs):
for key in requ... | 9,095 |
def set_list(event):
"""
insert an edited line from the entry widget
back into the listbox
"""
try:
index = listbox1.curselection()[0]
# delete old listbox line
listbox1.delete(index)
except IndexError:
index = tk.END
# insert edited item back into... | 9,096 |
def simplify_unicode(sentence):
"""
Most accented Latin characters are pronounced just the same as the base character.
Shrink as many extended Unicode repertoire into the Estonian alphabet as possible.
It is GOOD for machine learning to have smaller ortographic repertoire.
It is a BAD idea if we sta... | 9,097 |
def read_file(pickle_file_name):
"""Reads composite or non-composite novelty results from Pickle file.
:param pickle_file_name: Path to input file (created by
`write_standard_file` or `write_pmm_file`).
:return: novelty_dict: Has the following keys if not a composite...
novelty_dict['denorm_rad... | 9,098 |
def AutoscalersForMigs(migs, autoscalers, project):
"""Finds Autoscalers with target amongst given IGMs.
Args:
migs: List of triples (IGM name, scope type, scope name).
autoscalers: A list of Autoscalers to search among.
project: Project owning resources.
Returns:
A list of all Autoscalers with t... | 9,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.