content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def mock_imap_search_error():
"""Mock imap class values."""
with patch(
"custom_components.mail_and_packages.helpers.imaplib"
) as mock_imap_search_error:
mock_conn = mock.Mock(spec=imaplib.IMAP4_SSL)
mock_imap_search_error.IMAP4_SSL.return_value = mock_conn
mock_conn.login.... | 7,000 |
def delete_models_shares_groups(id, group_id, client=None):
"""Revoke the permissions a group has on this object
Use this function on both training and scoring jobs.
Parameters
----------
id : integer
The ID of the resource that is shared.
group_id : integer
The ID of the group... | 7,001 |
def step1ddiffusionanalytical(q, dt, alpha, beta, prng=np.random, **kwargs):
"""Analytical time stepping as proposed in Jenkins, Spano arXiv:1506.06998
Uses the asymptotic normality of the death process for small times
(see Griffiths, J. Math. Bio, 1984)
"""
theta = alpha+beta
beta_ =... | 7,002 |
def no_vtk():
    """Report whether VTK (with a working python wrapper) is unavailable.

    Returns True when the module-level ``_vtk_version`` probe is None,
    i.e. VTK was never successfully detected.
    """
    # Reading a module-level name needs no `global` declaration.
    return _vtk_version is None
def _download_type(
archive, manifest, model_class, batch_size, privacy_transform_fn):
"""Downloads a set of files and adds them to the archive."""
json_path = os.path.join(
os.path.dirname(archive.path), '%s.json' % model_class)
_LOG.info(
'Adding entities of type %s to temporary file... | 7,004 |
def get_request_list(flow_list: list) -> list:
"""
将flow list转换为request list。在mitmproxy中,flow是对request和response的总称,这个功能只获取request。
:param flow_list: flow的列表
:return: request的列表
"""
req_list = []
for flow in flow_list:
request = flow.get("request")
req_list.append(request)
... | 7,005 |
def safety(session: Session) -> None:
    """Run the ``safety`` scanner over the project's exported dependency set."""
    pkg_manager = Poetry(session)
    # Export dev requirements (without hashes) to a temp file, then scan it.
    with pkg_manager.export("--dev", "--without-hashes") as reqs:
        install(session, "safety")
        session.run("safety", "check", f"--file={reqs}", "--bare")
def query_total_production(start_date, end_date) -> Tuple[int]:
"""Total count of semi production on the given time interval"""
semi_count = None
fg_count = None
try:
with stSession() as s:
semi_count = (
s.query(ProductionScan)
.filter(
... | 7,007 |
def createPolyPlaneCtx(*args, **kwargs):
"""
Flags:
- attachToSubdivisionsAll : asa (bool) []
- attachToSubdivisionsHeight : ash (bool) []
- attachToSubdivisionsWidth : asw (bool) []
- axis : ax (int) []
... | 7,008 |
def add_fields(_, level, event_dict):
""" Add custom fields to each record. """
now = dt.datetime.now()
event_dict['timestamp'] = TZ.localize(now, True).astimezone(pytz.utc).isoformat()
event_dict['level'] = level
if session:
event_dict['session_id'] = session.get('session_id')
if requ... | 7,009 |
def Smith_set(A,P,params,election_ID,printing_wanted=False):
"""
Compute and return a list of the candidates in the Smith set.
This is the smallest set of candidates such that every candidate in the
Smith set beats every candidate not in the Smith set in one-on-one contests.
In this implementation, ... | 7,010 |
def run_async_from_thread(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args) -> T_Retval:
"""
Call a coroutine function from a worker thread.
:param func: a coroutine function
:param args: positional arguments for the callable
:return: the return value of the coroutine function
"""
... | 7,011 |
def check_tensor_shape(tensor_tf, target_shape):
""" Return a Tensorflow boolean graph that indicates whether
sample[features_key] has the specified target shape. Only check
not None entries of target_shape.
:param tensor_tf: Tensor to check shape for.
:param target_shape: Target shape to compare t... | 7,012 |
def print_as_markdown_table(l, heading=None):
"""print(`l` as a markdown formatted table)
Parameters
----------
l : list of lists or list of tuples or pandas.Series
the list of data you want printed. All rows must be same length
heading : list
a list of column headings. Must be same... | 7,013 |
def main(bind, workers):
    """Run the WSGI service.

    :param bind: address/port the server should listen on
    :param workers: worker-process count
    NOTE(review): ``locals()`` here passes {'bind': ..., 'workers': ...} as the
    config mapping for BarrierApplication — confirm those keys match the
    settings names the application expects.
    """
    BarrierApplication(app, locals()).run()
def LabelAddressPlus(ea, name, force=False, append_once=False, unnamed=False, nousername=False, named=False, throw=False):
"""
Label an address with name (forced) or an alternative_01
:param ea: address
:param name: desired name
:param force: force name (displace existing name)
:param append_onc... | 7,015 |
def do_match(station1, station2, latitude, elevation, distance):
"""
Perform the match between two stations.
Do initial latitude check to speed up the test
(not longitude as this isn't a constant distance)
Return probabilities for elevation, separation and Jaccard Index
:param Station Class... | 7,016 |
def rotation_matrix_from_vectors(vec1, vec2):
""" Find the rotation matrix that aligns vec1 to vec2
Args
----
vec1 (numpy.ndarray): A 3d "source" vector
vec2 (numpy.ndarray): A 3d "destination" vector
Returns
-------
numpy.ndarray: A transform matrix (3x3) which when applie... | 7,017 |
def get_file_from_cache_if_exists(file_path,
update_modification_time_on_access=True):
"""Get file from nfs cache if available."""
cache_file_path = get_cache_file_path(file_path)
if not cache_file_path or not file_exists_in_cache(cache_file_path):
# If the file does not exis... | 7,018 |
def audio(src, type="audio/ogg", other_attr={}):
"""
add audio file
args:
src <str> : source file
type <str> : type of audio file
other_attr <dict> : other attributes
"""
return f"""
<audio {_parse_attr(other_attr)}>
<source src="{src}" type="{type}"... | 7,019 |
def test_triangle(dim):
"""
Tests if dimensions can come from a triangle.
dim is a list or tuple of the three dimensions
"""
dim = [int(x) for x in dim]
dim.sort()
if dim[0] + dim[1] > dim[2]:
return True
else:
return False | 7,020 |
def arg_parser(data: str):
    """Parse a declaration list such as ``"x[a1, a2, a3], y[k1=a1, a2, k3=a3], z"``.

    Nested ``[]`` are ignored.

    :param data: raw declaration string
    :return: list of ``NameWithAttrs`` produced by ``_ARG_WITH_ATTR_PARSER``
    """
    res: List[NameWithAttrs] = _ARG_WITH_ATTR_PARSER.parse(data)
    return res
def _get_resource(span):
"""Get resource name for span"""
if "http.method" in span.attributes:
route = span.attributes.get("http.route")
return (
span.attributes["http.method"] + " " + route
if route
else span.attributes["http.method"]
)
return sp... | 7,022 |
def merge(
_0: dask.dataframe.core.DataFrame,
_1: dask.dataframe.core.DataFrame,
/,
*,
how: Literal["inner"],
left_on: Literal["x"],
right_index: bool,
shuffle: Literal["disk"],
):
"""
usage.dask: 1
"""
... | 7,023 |
def preprocess_hedge_funds(csv, fund_name):
"""
Get hedge funds holding from their 13F filling. Data is from https://whalewisdom.com/.
You need to sign up a free account to access the csv files. Data is updated quarterly.
Parameters
----------
csv: str
csv file path
fund_name: str
... | 7,024 |
def get_draw_title(kdata):
"""根据typ值,返回相应的标题,如 上证指数(日线)
参数:kdata: KData实例
返回:一个包含stock名称的字符串,可用作绘图时的标题
"""
if not kdata:
return ""
query = kdata.getQuery()
stock = kdata.getStock()
if stock.isNull():
return ""
s1 = ''
if query.kType == KQuery.KType.DAY:
... | 7,025 |
def _B(slot):
"""Convert slot to Byte boundary"""
return slot*2 | 7,026 |
def nll(perm, true):
"""
perm: (n, n) or (s, n, n)
true: (n)
"""
n = true.size(-1)
# i = torch.arange(n, device=perm.device)
# j = true.to(perm.device)
# print("perm.nll:", perm.size(), true.size())
elements = perm.cpu()[..., torch.arange(n), true]
# elements = perm.cpu()[torch.a... | 7,027 |
def _peaks_colors_from_points(points, colors=None, points_per_line=2):
"""
Returns a VTK scalar array containing colors information for each one of
the peaks according to the policy defined by the parameter colors.
Parameters
----------
points : (N, 3) array or ndarray
points coordinate... | 7,028 |
def log_verbose(message):
    """Print ``message`` to stdout when the module-level VERBOSE flag is truthy."""
    if not VERBOSE:
        return
    print(message)
def epi_reg(epi, t1, t1brain, out='epi_reg', **kwargs):
"""Wrapper for the ``epi_reg`` command.
:arg epi: Input EPI image
:arg t1: Input wholehead T1 image
:arg t1brain: Input brain extracted T1 image
:arg out: Output name
"""
asrt.assertIsNifti(epi)
asrt.assertIsNi... | 7,030 |
def load_towns():
"""Sample of Wikipedia dataset that contains informations about Toulouse, Paris, Lyon and
Bordeaux.
Examples
--------
>>> from pprint import pprint as print
>>> from cherche import data
>>> towns = data.load_towns()
>>> print(towns[:3])
[{'article': 'Paris (Fren... | 7,031 |
def nonce_initialization(params: InitializeNonceParams) -> TransactionInstruction:
"""Generate an instruction to initialize a Nonce account.
Args:
params: The nonce initialization params.
Returns:
The instruction to initialize the nonce account.
"""
return TransactionInstruction.f... | 7,032 |
def to_weeknr(date=''):
    """Map a ``YYYYMMDD`` date string to its ISO week label.

    e.g. ``'20200713'`` becomes ``'w29'``.
    """
    parsed = pd.to_datetime(date).to_pydatetime()
    # isocalendar() yields (ISO year, ISO week, ISO weekday).
    _, iso_week, _ = parsed.isocalendar()
    return f"w{iso_week}"
def build_logisticregression(X_loc, y_loc, args):
"""finds best parameters for logistic regression"""
Printer(colored('(training) ', 'green') +
'searching for best parameters for logistic regression')
# specify parameters and distributions to sample from
param_dist = {"C": np.logspace(-9, 3, 13),
"solver":... | 7,034 |
def load_data(experiments,
remove_outlier=True,
peptides=["A5cons",
"A6cons",
"phage_ctl_0",
"phage_ctl_1",
"phage_ctl_2",
"phage_ctl_4",
"phage_ctl... | 7,035 |
def get_lines(clearance):
"""
Add lines per reference well interval between the closest points on the
reference well and the offset well and color them according to the
calculated Separation Factor (SF) between the two wells at these points.
Parameters
----------
clearance: welleng.clea... | 7,036 |
def generate_md5_hash(filepath):
"""Returns md5 hash of file.
Args:
filepath: str. Absolute path to the file.
Returns:
str. Hexadecimal hash of specified file.
"""
m = hashlib.md5()
with python_utils.open_file(filepath, 'rb', encoding=None) as f:
while True:
... | 7,037 |
def app():
    """Provide the initialized Flask app.

    Initializes the module-level ``app_`` via ``init_app`` (no config
    override) and yields it inside an application context; the generator
    shape suggests this is a pytest fixture — confirm at the decorator site.
    """
    init_app(app_, None)
    with app_.app_context():
        yield app_
def status():
    """Return status.

    Serializes the module-level ``STATUS`` object as a JSON response.
    """
    return jsonify(STATUS)
def get_entitlement(account_id: Optional[str] = None,
customer_id: Optional[str] = None,
entitlement_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetEntitlementResult:
"""
Returns the requested Entitlement resource... | 7,040 |
def test_mitpe_extract(settings, base_url):
"""Verify that BeautifulSoup tags are returned per listing and course detail"""
settings.MITPE_BASE_URL = base_url
results = extract()
assert len(results) == (1 if base_url else 0)
assert results == (
[
{
"url": "https:... | 7,041 |
def skip_to_home(fxn):
    """Decorator: bounce already-logged-in users straight to the home page.

    Anonymous visitors fall through to the wrapped view unchanged.
    """
    @wraps(fxn)
    def skipped_page_fxn(*arg, **kwargs):
        # Guard clause: only authenticated sessions are redirected.
        if not session.get('logged_in'):
            return fxn(*arg, **kwargs)
        return redirect(url_for('home'))
    return skipped_page_fxn
def compute(infile, cmpdfile='compounds.txt', rxnfile='reactions.txt',
sinkfile='sinks.txt', reverse=False):
"""Convert the output from the RetroPath2.0 workflow."""
# Get content
content = dict()
with open(infile, 'r') as fh:
reader = csv.DictReader(fh)
for row in reader:
... | 7,043 |
def get_all_paths_from_directory(directory: Path, recursive: bool, paths: [str] = [], ) -> [Path]:
"""
Gets a list of file paths for all files in the given directory (and its subdirectories if recursive is true)
:param directory: The starting directory to get file paths from
:param recursive: Whether fi... | 7,044 |
def check_contigs_for_dupes(matches):
    """Return the set of contigs that match more than one UCE locus.

    :param matches: mapping of contig/node -> iterable of matched loci
    :return: set of nodes whose matches contain 2+ distinct loci
    """
    # The defaultdict intermediate was unnecessary (values were assigned,
    # never appended); a single set comprehension does the same job.
    return {node for node, loci in matches.items() if len(set(loci)) > 1}
def substitute(P, x0, x1, V=0):
"""
Substitute a variable in a polynomial array.
Args:
P (Poly) : Input data.
x0 (Poly, int) : The variable to substitute. Indicated with either unit
variable, e.g. `x`, `y`, `z`, etc. or through an integer
matching the unit va... | 7,046 |
def munkres(costs):
    """
    Entry method to solve the assignment problem.

    costs: list of non-infinite values entries of the cost matrix
        [(i,j,value)...]

    Delegates to the ``Munkres`` solver class and returns whatever
    ``Munkres.munkres()`` produces — presumably the optimal assignment;
    confirm against the Munkres class definition.
    """
    solver = Munkres(costs)
    return solver.munkres()
def train(fold=C.TRAIN.SAMPLES.CURRENT_FOLD):
"""Do the main training loop, as described in the config.
"""
warnings.filterwarnings('always')
training_data = RgbLidarDataset('train', fold=fold)
eval_data = RgbLidarDataset('test', fold=fold)
train_sampler = make_stratified_sampler(train... | 7,048 |
def compute_mean_std(dataset):
"""
https://stats.stackexchange.com/questions/25848/how-to-sum-a-standard-deviation
"""
# global_mean = np.zeros((3 * 64), dtype=np.float64)
# global_var = np.zeros((3 * 64), dtype=np.float64)
n_items = 0
s = RunningStatistics()
for image_fname in datase... | 7,049 |
def test_clean_connections_p0(monkeypatch):
"""Add a connection, fake a closed thread and make sure it is removed."""
db_disconnect_all()
class mock_connection():
def __init__(self) -> None: self.value = _MOCK_VALUE_1
def close(self): self.value = None
def mock_connect(*args, **kwargs)... | 7,050 |
def linkrref(rref:RRef, subdirs:"Optional[List[str]]"=None, verbose:bool=False)->None:
    """ Create a 'result-' symlink under the Pylightnix experiments folder.

    :param rref: realization reference to link
    :param subdirs: optional extra path components (None means no subdirs)
    :param verbose: forwarded to ``linkrrefs``
    """
    # Fix: the original used a mutable default argument (subdirs=[]).
    # None is the sentinel now; the annotation is a string so Optional
    # need not be imported at runtime.
    linkrrefs([rref], [] if subdirs is None else subdirs, verbose)
def add(request):
"""
Add contact information.
**Templates:**
* ``rolodex/add.html``
**Template Variables:**
* form
* results: the list of similar names to allow user to check for dupes
* name: the new name that is submitted
"""
results = []
name = None
if request.m... | 7,052 |
def convert_total (letter1,number1, letter2, number2):
"""
Description
-----------
Converting the letter of a column and the number of a line from an exceldata to a range
Context
----------
is called in wrapp_ProcessUnits and wrapp_SystemData
Parameters
----------
le... | 7,053 |
def scale_facet_list(facet_list, scale):
    """Scale every facet in ``facet_list`` by the given scaling factor.

    :param facet_list: iterable of facets accepted by ``scale_facet``
    :param scale: scaling factor forwarded to ``scale_facet``
    :return: a new list of scaled facets (the input list is not modified)
    """
    # Comprehension replaces the manual append loop (same order, same result).
    return [scale_facet(facet, scale) for facet in facet_list]
def assert_mask_equal(m1, m2, err_msg=''):
    """Assert that two masks are equal; ``nomask`` only matches ``nomask``."""
    # Symmetric check: if either side is nomask, the other must be too.
    for first, second in ((m1, m2), (m2, m1)):
        if first is nomask:
            assert_(second is nomask)
    assert_array_equal(m1, m2, err_msg=err_msg)
def options():
""" Prints the options for the player to choose: to Play or Quit game """
options_font_size = 50
options_font = pygame.font.SysFont('impact', options_font_size)
options_pos = [(100,500),(570,500)]
play_text = options_font.render("Play", True, black)
quit_text = options... | 7,056 |
def move_lines_to_index(uwline_index_to, lineno, uwlines, lines):
"""Method moves all lines in the list to the proper index of uwlines and
update lineno on these lines. This is useful when you want to change the
order of code lines. But note: it is not updating lineno on other lines
@:returns positi... | 7,057 |
def birch(V, E0, B0, BP, V0):
"""
From Intermetallic compounds: Principles and Practice, Vol. I: Principles
Chapter 9 pages 195-210 by M. Mehl. B. Klein, D. Papaconstantopoulos paper downloaded from Web
case where n=0
"""
E = (E0
+ 9.0/8.0*B0*V0*((V0/V)**(2.0/3.0) - 1.0)**2
+... | 7,058 |
def test_zarr_dask_nD(make_napari_viewer):
    """Test adding nD zarr image.

    Builds a chunked 3-D zarr array, wraps it as a dask array, adds it to a
    napari viewer, and checks the layer exposes exactly the supplied data.
    """
    viewer = make_napari_viewer()
    # Chunked 3-D zeros; paint a small block so the data is non-trivial.
    data = zarr.zeros((200, 100, 50), chunks=(40, 20, 10))
    data[53:63, 10:20, :] = 1
    zdata = da.from_zarr(data)
    viewer.add_image(zdata)
    assert np.all(viewer.layers[0].data == zdata)
def debloat(edges: set, nodes: int, threshold: tuple = (0.95, 0.95)) -> Set[Tuple[str, str]]:
"""Remove nodes with inflow and/or ourflow > threshold"""
df = pd.DataFrame(list(edges), columns=["source", "target"])
checkpoint_shape = df.shape[0]
df_inflow = df.groupby("target").count().reset_index().renam... | 7,060 |
def result(jid):
""" Displays a job result.
Args:
jid (str): The job id.
"""
job = q.fetch_job(jid)
statuses = {
'queued': 202,
'started': 202,
'finished': 200,
'failed': 500,
'job not found': 404,
}
if job:
job_status = job.get_statu... | 7,061 |
def install_(ca_path, password, ca_url):
    """Create a ca workspace.

    Thin wrapper around ``install``. Note the argument order is swapped:
    this function takes (ca_path, password, ca_url) but forwards
    (ca_path, ca_url, password).
    """
    install(ca_path, ca_url, password)
def fd_nabla_1(
x: np.ndarray,
fun: Callable,
delta_vec: np.ndarray,
) -> np.ndarray:
"""Calculate FD approximation to 1st order derivative (Jacobian/gradient).
Parameters
----------
x: Parameter vector, shape (n_par,).
fun: Function returning function values. Scalar- or vector-valued.
... | 7,063 |
def test_api_export_spacy_model(temp_folder: Path) -> None:
"""spaCy model loading is one of the things we need to support"""
use_fs(temp_folder)
bucket = Pathy("gs://my-bucket/")
bucket.mkdir(exist_ok=True)
model = spacy.blank("en")
output_path = Pathy("gs://my-bucket/models/my_model")
mode... | 7,064 |
def init(options, configuration, plugin):
""" initializes the plugin """
plugin.log("spiderpay.py initializing")
# maps destination to list of paths each of which has some score
# window corresponding to its max capacity and how much we use it
plugin.routes_in_use = {}
# per-destination queue ... | 7,065 |
def get_ref_len_from_bam(bam_path, target_contig):
"""
Fetch the length of a given reference sequence from a :py:class:`pysam.AlignmentFile`.
Parameters
----------
bam_path : str
Path to the BAM alignment
target_contig : str
The name of the contig for which to recover haplotype... | 7,066 |
def matches_geometry(block, coords, tolerances):
"""
Given the interactions of a block and some coordinates,
verify that the coordinates match the geometry as defined
in the block interactions within accpatable deviations.
"""
for bond in itertools.chain(block.interactions["bonds"], block.intera... | 7,067 |
def vstd(df, n=10):
    """Rolling standard deviation of traded volume, VSTD(n).

    VSTD=STD(Volume,N)=[∑(Volume-MA(Volume,N))^2/N]^0.5

    :param df: DataFrame with ``date`` and ``volume`` columns
    :param n: rolling window length (default 10)
    :return: DataFrame with ``date`` and ``vstd`` columns (sample std, ddof=1)
    """
    return pd.DataFrame({
        'date': df.date,
        'vstd': df.volume.rolling(n).std(ddof=1),
    })
def createMonatomicGas(elm, pascal):
    """createMonatomicGas(elm, pascal)
    Create a gas of single atoms of the specified element at the specified
    pressure in Pascal and 300 K.

    :param elm: element object (must support ``toString()``)
    :param pascal: pressure in Pascal
    :return: an ``epq.Gas`` with composition (elm,), weight fraction (1,),
        temperature 300 K, and a descriptive name
    """
    return epq.Gas((elm,), (1,), pascal, 300.0, elm.toString() + " gas at %f Pa" % pascal)
def _create_campaign_feed(
client, customer_id, campaign_id, feed_mapping, feed_resource_name, chain_id
):
"""Creates the campaign feed.
Args:
client: The Google Ads API client.
customer_id: The Google Ads customer ID.
campaign_id: The campaign ID to which the affiliate location ext... | 7,070 |
def boxes_to_central_line_torch(boxes):
"""See boxes_to_central_line
Args:
boxes (tensor[..., 7]): (x, y, z, l, w, h, theta) of each box
Returns:
boxes_lp (tensor[..., 3]): (a, b, c) line parameters of each box
"""
# in case length is shorter than width
bmask = boxes[..., 3] < ... | 7,071 |
def load_as_spark(url: str) -> "PySparkDataFrame": # noqa: F821
"""
Load the shared table using the give url as a Spark DataFrame. `PySpark` must be installed, and
the application must be a PySpark application with the Apache Spark Connector for Delta Sharing
installed.
:param url: a url under the... | 7,072 |
def surface_plot_go(fields, is_note_book=False):
"""Plots 3D surface plot over given theta/phi range in Fields by calculating cartesian
coordinate equivalent of spherical form."""
print("Processing SurfacePlot...")
# Finds the phi & theta range
phiSize = fields.shape[0]
thetaSize = fields.shape... | 7,073 |
def calClassSpecificProbPanel(param, expVars, altAvMat, altChosen, obsAv):
"""
Function that calculates the class specific probabilities for each decision-maker in the
dataset
Parameters
----------
param : 1D numpy array of size nExpVars.
Contains parameter values.
expVars : 2D ... | 7,074 |
def build_expression_tree(tokens):
"""Returns an ExpressionTree based upon by a tokenized expression."""
s = [] # we use Python list as stack
for t in tokens:
if t in '+-x*/': # t is an operator symbol
s.append(t) ... | 7,075 |
def unpack_blockchain(s: str) -> block.Blockchain:
    """Unpack a blockchain from a JSON string with b64-encoded bytes.

    :param s: JSON document holding a list of serialized blocks
    :return: list of blocks decoded by ``_unpack_block``
    """
    raw_blocks = json.loads(s)
    # Loop variable renamed: the original `block` shadowed the `block` module
    # inside the comprehension. Also fixed docstring typo ("Unapck").
    return [_unpack_block(raw) for raw in raw_blocks]
def parse(options,full_path):
"""
Parse the data according to several regexes
"""
global p_entering_vip_block, p_exiting_vip_block, p_vip_next, p_vip_number, p_vip_set
in_vip_block = False
vip_list = []
vip_elem = {}
order_keys = []
if (options.input_file != None):
... | 7,077 |
def launch(**kwargs):
""" Connects to PM320E and instantiates server
:param kwargs: (dict) containing relevant kwargs
:logger: instance of LogClient for logging purposes
:port: (int) port number for the Cnt Monitor server
"""
try:
settings = load_device_config('thorlabs_pm320e'... | 7,078 |
def update_wishlist_games(cur, table, wishlist_args, update_delay):
"""A function to update wishlist games.
:param cur: database cursor object
:type cur: Cursor
:param table: name of table to work on
:type table: str
:param wishlist_args: list of wishlist g... | 7,079 |
def WrapSignal(signal):
"""Wrap a model signal with a corresponding frontend wrapper."""
if type(signal) is M.BitsSignal:
return BitsFrontend(signal)
elif type(signal) is M.ListSignal:
return ListFrontend(signal)
elif type(signal) is M.BundleSignal:
return BundleFrontend(signal)... | 7,080 |
def is_array_like(element: Any) -> bool:
    """Return True for JAX arrays, NumPy arrays, Python numeric scalars
    (`float`/`complex`/`bool`/`int`), and objects advertising
    ``__jax_array__``.
    """
    scalar_or_array_types = (jnp.ndarray, np.ndarray, float, complex, bool, int)
    return isinstance(element, scalar_or_array_types) or hasattr(
        element, "__jax_array__"
    )
def parse(javascript_code):
    """Return the syntax tree of ``javascript_code``.

    The tree has the same structure as the one produced by esprima.js;
    this is convenience shorthand for ``PyJsParser().parse``.
    """
    return PyJsParser().parse(javascript_code)
def twitterAuth():
    """ Authenticate user using Twitter API generated credentials.

    Reads CONSUMER_KEY/CONSUMER_SECRET and ACCESS_KEY/ACCESS_SECRET from
    module scope and returns a ``tweepy.API`` handle configured to wait
    (and notify) on rate limits.
    """
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    return tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
def GetInstalledPackageUseFlags(pkg_str, board=None):
"""Gets the list of USE flags for installed packages matching |pkg_str|.
Args:
pkg_str: The package name with optional category, version, and slot.
board: The board to inspect.
Returns:
A dictionary with the key being a package CP and the value b... | 7,084 |
async def test_clear_snapshots(coresys: CoreSys, tmp_path):
"""Test snapshot cleanup."""
for slug in ["sn1", "sn2", "sn3", "sn4", "sn5"]:
temp_tar = Path(tmp_path, f"{slug}.tar")
with SecureTarFile(temp_tar, "w"):
pass
snapshot = Snapshot(coresys, temp_tar)
snapshot._... | 7,085 |
def test_scorekeeper_retrieve_by_slug(scorekeeper_slug: str):
"""Testing for :py:meth:`wwdtm.scorekeeper.Scorekeeper.retrieve_by_slug`
:param scorekeeper_slug: Scorekeeper slug string to test retrieving
scorekeeper information
"""
scorekeeper = Scorekeeper(connect_dict=get_connect_dict())
i... | 7,086 |
def make_cursor():
"""
Creates a cursor for iterating through results
GetParams:
account: an account
user: a user
handle: a shark client handle
Returns:
a json object container the cursor handle
"""
data, statusCode = cursor()
return jsonify(data), statusCo... | 7,087 |
def run_result_factory(data: list[tuple[Any, Any]]):
"""
We need to handle dt.datetime and agate.table.Table.
The rest of the types should already be JSON-serializable.
"""
d = {}
for key, val in data:
if isinstance(val, dt.datetime):
val = val.isoformat()
elif isinst... | 7,088 |
def compute_steepness(zeroth_moment, peak_wavenumber):
    """Characteristic steepness from the spectrum: sqrt(2*m0) * k_peak."""
    characteristic_amplitude = np.sqrt(2 * zeroth_moment)
    return characteristic_amplitude * peak_wavenumber
def test_getting_monthly_annual_temp_values():
""" Test that prior_months and prior_year values are correct
Values were hand-verified using panoply tables"""
ct = AlaskaTemperature()
ct.initialize_from_config_file()
# Test prior months values
# These are values starting from 2/1901
# ac... | 7,090 |
def secondary_side_radius(mass_ratio, surface_potential):
    """
    Side radius of secondary component
    :param mass_ratio: float;
    :param surface_potential: float;
    :return: float; side radius
    """
    # NOTE(review): the two 1.0 constants look like fixed synchronicity /
    # component parameters for the 'secondary' case — confirm against the
    # signature of calculate_side_radius.
    return calculate_side_radius(1.0, mass_ratio, 1.0, surface_potential, 'secondary')
def pts_from_rect_inside(r):
    """Return (start_pt, end_pt) where end_pt is the last point *inside* the rectangle.

    ``r`` is (x, y, width, height); the inclusive end point is therefore
    (x + width - 1, y + height - 1).
    """
    x, y, width, height = r[0], r[1], r[2], r[3]
    return (x, y), (x + width - 1, y + height - 1)
def minimum_distance(object_1, object_2):
""" Takes two lists as input
A list of numpy arrays of coordinates that make up object 1 and object 2
Measures the distances between each of the coordinates
Returns the minimum distance between the two objects, as calculated using a vector norm
Stops the cal... | 7,093 |
def _park_ship(board: MyBoard, ship: Ship):
""" Send our ship to the nearest shipyard """
moves_left = board.moves_left
shipyard_to_distance = _get_shipyard_to_distance(board, ship.position, board.me)
shipyards = [s for s, d in shipyard_to_distance.items() if d <= moves_left]
if not shipyards:
... | 7,094 |
def retrieve_pkl_file(filename, verbose = False):
"""
Retrieve and return contents of pkl file
"""
if verbose == True:
start_time = timelib.time()
print("\n * Retrieving %s file ..."%filename)
data = pd.read_pickle(filename)
if verbose == True:
print("\n %s retrie... | 7,095 |
def extractIpsFile(containerFile,newSimName):
"""
Given a container file, get the ips file in it and write it to current
directory so that it can be used
"""
oldIpsFile=os.path.splitext(containerFile)[0]+os.extsep+"ips"
zf=zipfile.ZipFile(containerFile,"r")
foundFile=""
# Assume that c... | 7,096 |
def nplr(measure, N, rank=1, dtype=torch.float):
""" Return w, p, q, V, B such that
(w - p q^*, B) is unitarily equivalent to the original HiPPO A, B by the matrix V
i.e. A = V[w - p q^*]V^*, B = V B
"""
assert dtype == torch.float or torch.cfloat
if measure == 'random':
dtype = torch.cf... | 7,097 |
def read_data(oldest_year: int = 2020, newest_year: int = 2022):
"""Read in csv files of yearly covid data from the nytimes and concatenate into a single pandas DataFrame.
Args:
oldest_year: first year of data to use
newest_year: most recent year of data to use
"""
df_dicts = {} # diction... | 7,098 |
def ip_is_v4(ip: str) -> bool:
"""
Determines whether an IP address is IPv4 or not
:param str ip: An IP address as a string, e.g. 192.168.1.1
:raises ValueError: When the given IP address ``ip`` is invalid
:return bool: True if IPv6, False if not (i.e. probably IPv4)
"""
return type(ip_addr... | 7,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.