Columns: sentence1 (string, lengths 52 to 3.87M), sentence2 (string, lengths 1 to 47.2k), label (string, 1 class: entailment).
def get_argument_parser(): """ Get a parser that is able to parse program arguments. :return: instance of argparse.ArgumentParser """ parser = argparse.ArgumentParser(description=project.get_description(), epilog=_('Visit us at {website}.').format(website=project....
Get a parser that is able to parse program arguments. :return: instance of argparse.ArgumentParser
entailment
def task(self, queue: str = 'kuyruk', **kwargs: Any) -> Callable: """ Wrap functions with this decorator to convert them to *tasks*. After wrapping, calling the function will send a message to a queue instead of running the function. :param queue: Queue name for the tasks. ...
Wrap functions with this decorator to convert them to *tasks*. After wrapping, calling the function will send a message to a queue instead of running the function. :param queue: Queue name for the tasks. :param kwargs: Keyword arguments will be passed to :class:`~kuyruk.Task...
entailment
def channel(self) -> Iterator[amqp.Channel]: """Returns a new channel from a new connection as a context manager.""" with self.connection() as conn: ch = conn.channel() logger.info('Opened new channel') with _safe_close(ch): yield ch
Returns a new channel from a new connection as a context manager.
entailment
def connection(self) -> Iterator[amqp.Connection]: """Returns a new connection as a context manager.""" TCP_USER_TIMEOUT = 18 # constant is available on Python 3.6+. socket_settings = {TCP_USER_TIMEOUT: self.config.TCP_USER_TIMEOUT} if sys.platform.startswith('darwin'): del...
Returns a new connection as a context manager.
entailment
def scan_videopath(videopath, callback, recursive=False): """ Scan the videopath for video files. :param videopath: Path object :param callback: Instance of ProgressCallback :param recursive: True if the scanning should happen recursively :return: tuple with list of videos and list of subti...
Scan the videopath for video files. :param videopath: Path object :param callback: Instance of ProgressCallback :param recursive: True if the scanning should happen recursively :return: tuple with list of videos and list of subtitles (videos have matched subtitles)
entailment
def __scan_folder(folder_path, callback, recursive=False): """ Scan a folder for videos and subtitles :param folder_path: String of a directory :param callback: Instance of ProgressCallback :param recursive: True if the scanning should happen recursively :return: tuple with list of videos and list...
Scan a folder for videos and subtitles :param folder_path: String of a directory :param callback: Instance of ProgressCallback :param recursive: True if the scanning should happen recursively :return: tuple with list of videos and list of subtitles (videos have matched subtitles)
entailment
def merge_path_subvideo(path_subvideos, callback): """ Merge subtitles into videos. :param path_subvideos: a dict with paths as keys and a list of lists of videos and subtitles :param callback: Instance of ProgressCallback :return: tuple with list of videos and list of subtitles (videos have matched ...
Merge subtitles into videos. :param path_subvideos: a dict with paths as keys and a list of lists of videos and subtitles :param callback: Instance of ProgressCallback :return: tuple with list of videos and list of subtitles (videos have matched subtitles)
entailment
def filter_files_extensions(files, extension_lists): """ Put the files in buckets according to extension_lists files=[movie.avi, movie.srt], extension_lists=[[avi],[srt]] ==> [[movie.avi],[movie.srt]] :param files: A list of files :param extension_lists: A list of lists of extensions :return: The...
Put the files in buckets according to extension_lists files=[movie.avi, movie.srt], extension_lists=[[avi],[srt]] ==> [[movie.avi],[movie.srt]] :param files: A list of files :param extension_lists: A list of lists of extensions :return: The files filtered and sorted according to extension_lists
entailment
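A minimal sketch of the bucketing filter_files_extensions describes, assuming extensions are matched case-insensitively against the file suffix (the original's exact matching rules are truncated above):

    def filter_files_extensions(files, extension_lists):
        # One bucket per extension list; a file lands in every bucket
        # whose extension list contains its lower-cased suffix.
        buckets = [[] for _ in extension_lists]
        for f in files:
            ext = f.rsplit('.', 1)[-1].lower()
            for bucket, extensions in zip(buckets, extension_lists):
                if ext in extensions:
                    bucket.append(f)
        return buckets

    # filter_files_extensions(['movie.avi', 'movie.srt'], [['avi'], ['srt']])
    # -> [['movie.avi'], ['movie.srt']]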
def detect_language_filename(cls, filename): """ Detect the language of a subtitle filename :param filename: filename of a subtitle :return: Language object, None if language could not be detected. """ log.debug('detect_language(filename="{}") ...'.format(filename)) ...
Detect the language of a subtitle filename :param filename: filename of a subtitle :return: Language object, None if language could not be detected.
entailment
def matches_video_filename(self, video): """ Detect whether the filename of the video file matches this SubtitleFile. :param video: VideoFile instance :return: True if match """ vid_fn = video.get_filename() vid_base, _ = os.path.splitext(vid_fn) vid_base...
Detect whether the filename of the video file matches this SubtitleFile. :param video: VideoFile instance :return: True if match
entailment
def logging_file_install(path): """ Install a logger that will write to file. If this function has already installed a handler, replace it. :param path: path to the log file; use None for the default file location. """ if path is None: path = configuration_get_default_folder() / LOGGING_DEFAULTNAM...
Install a logger that will write to file. If this function has already installed a handler, replace it. :param path: path to the log file; use None for the default file location.
entailment
def logging_stream_install(loglevel): """ Install a logger that will output to stderr. If this function has already installed a handler, replace it. :param loglevel: log level for the stream """ formatter = logging.Formatter(LOGGING_FORMAT) logger = logging.getLogger() logger.removeHandler(LOG...
Install a logger that will output to stderr. If this function has already installed a handler, replace it. :param loglevel: log level for the stream
entailment
def parseFromDelimitedString(obj, buf, offset=0): """ Stanford CoreNLP uses the Java "writeDelimitedTo" function, which writes the size (and offset) of the buffer before writing the object. This function handles parsing this message starting from offset 0. @returns how many bytes of @buf were consu...
Stanford CoreNLP uses the Java "writeDelimitedTo" function, which writes the size (and offset) of the buffer before writing the object. This function handles parsing this message starting from offset 0. @returns how many bytes of @buf were consumed.
entailment
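For reference, Java's writeDelimitedTo prefixes each serialized message with a varint length. A minimal sketch of the parsing side, assuming the protobuf package's internal _DecodeVarint32 helper (present in the library but not public API) is acceptable:

    from google.protobuf.internal.decoder import _DecodeVarint32

    def parse_delimited(message_class, buf, offset=0):
        # Read the varint size prefix, then parse that many bytes.
        size, pos = _DecodeVarint32(buf, offset)
        msg = message_class()
        msg.ParseFromString(buf[pos:pos + size])
        # Bytes consumed: varint prefix plus message payload.
        return msg, (pos - offset) + size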
def writeToDelimitedString(obj, stream=None): """ Stanford CoreNLP uses the Java "writeDelimitedTo" function, which writes the size (and offset) of the buffer before writing the object. This function writes the message in the same format: the encoded size followed by the serialized object. @returns the stream the message was written to....
Stanford CoreNLP uses the Java "writeDelimitedTo" function, which writes the size (and offset) of the buffer before writing the object. This function writes the message in the same format: the encoded size followed by the serialized object. @returns the stream the message was written to.
entailment
def to_text(sentence): """ Helper routine that converts a Sentence protobuf to a string from its tokens. """ text = "" for i, tok in enumerate(sentence.token): if i != 0: text += tok.before text += tok.word return text
Helper routine that converts a Sentence protobuf to a string from its tokens.
entailment
def showMessage(self, message, *args): """ Public method to show a message in the bottom part of the splashscreen. @param message message to be shown (string or QString) """ QSplashScreen.showMessage( self, message, Qt.AlignBottom | Qt.AlignRight | Qt.AlignAbsolute, ...
Public method to show a message in the bottom part of the splashscreen. @param message message to be shown (string or QString)
entailment
def parse_path(path): """ Parse a video at filepath, using the pymediainfo framework. :param path: path of the video to parse, as a string """ import pymediainfo metadata = Metadata() log.debug('pymediainfo: parsing "{path}" ...'.format(path=path)) parseRes = pymedi...
Parse a video at filepath, using the pymediainfo framework. :param path: path of the video to parse, as a string
entailment
def _read_metadata(self): """ Private function to read (if not read already) and store the metadata of the local VideoFile. """ if self._is_metadata_init(): return try: log.debug('Reading metadata of "{path}" ...'.format(path=self._filepath)) d...
Private function to read (if not read already) and store the metadata of the local VideoFile.
entailment
def get_size(self): """ Get size of this VideoFile in bytes :return: size as integer """ if self._size is None: self._size = self._filepath.stat().st_size return self._size
Get size of this VideoFile in bytes :return: size as integer
entailment
def get_osdb_hash(self): """ Get the hash of this local videofile :return: hash as string """ if self._osdb_hash is None: self._osdb_hash = self._calculate_osdb_hash() return self._osdb_hash
Get the hash of this local videofile :return: hash as string
entailment
def _calculate_osdb_hash(self): """ Calculate OSDB (OpenSubtitleDataBase) hash of this VideoFile :return: hash as string """ log.debug('_calculate_OSDB_hash() of "{path}" ...'.format(path=self._filepath)) f = self._filepath.open(mode='rb') file_size = self.get_si...
Calculate OSDB (OpenSubtitleDataBase) hash of this VideoFile :return: hash as string
entailment
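The snippet above is truncated, but the OSDB scheme it names is the widely documented OpenSubtitles hash: the file size plus the 64-bit little-endian word sums of the first and last 64 KiB, truncated to 64 bits. A standalone sketch (not the class's exact code):

    import struct

    def osdb_hash(path, chunk=65536):
        with open(path, 'rb') as f:
            filesize = f.seek(0, 2)  # seeking to the end returns the size
            if filesize < 2 * chunk:
                raise ValueError('file too small to hash')
            value = filesize
            for start in (0, filesize - chunk):
                f.seek(start)
                data = f.read(chunk)
                # Sum the chunk as little-endian unsigned 64-bit words.
                value += sum(struct.unpack('<%dQ' % (chunk // 8), data))
        return '%016x' % (value & 0xFFFFFFFFFFFFFFFF)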
def import_module(name: str) -> ModuleType: """Import module by its name from the following places in order: - main module - current working directory - Python path """ logger.debug("Importing module: %s", name) if name == main_module_name(): return main_module return import...
Import module by its name from the following places in order: - main module - current working directory - Python path
entailment
def main_module_name() -> str: """Returns the name of the main module.""" if not hasattr(main_module, '__file__'): # running from interactive shell return None main_filename = os.path.basename(main_module.__file__) module_name, ext = os.path.splitext(main_filename) return modul...
Returns the name of the main module.
entailment
def write_stream(src_file, destination_path): """ Write the file-like src_file object to the file at destination_path :param src_file: file-like data to be written :param destination_path: string of the destination file """ with open(destination_path, 'wb') as destination_file: shutil.copyfileo...
Write the file-like src_file object to the file at destination_path :param src_file: file-like data to be written :param destination_path: string of the destination file
entailment
def build_dirs(files): ''' Build necessary directories based on a list of file paths ''' for i in files: if type(i) is list: build_dirs(i) continue else: if len(i['path']) > 1: addpath = os.path.join(os.getcwd(), *i['path'][:-1]) ...
Build necessary directories based on a list of file paths
entailment
def get_want_file_pos(file_list): ''' Ask the user which files in file_list he or she is interested in. Return indices for the files inside file_list ''' want_file_pos = [] print '\nFiles contained:\n' for i in file_list: print(os.path.join(*i['path'])) while 1: all_answe...
Ask the user which files in file_list he or she is interested in. Return indices for the files inside file_list
entailment
def get_file_starts(file_list): ''' Return the starting position (in bytes) of a list of files by iteratively summing their lengths ''' starts = [] total = 0 for i in file_list: starts.append(total) total += i['length'] return starts
Return the starting position (in bytes) of a list of files by iteratively summing their lengths
entailment
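The running sum that get_file_starts builds can also be expressed with itertools.accumulate; a sketch:

    from itertools import accumulate

    def get_file_starts(file_list):
        # Each file starts where the previous lengths end; prepend 0
        # and drop the grand total to get one offset per file.
        lengths = [f['length'] for f in file_list]
        return list(accumulate([0] + lengths[:-1]))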
def get_rightmost_index(byte_index=0, file_starts=[0]): ''' Retrieve the highest-indexed file that starts at or before byte_index. ''' i = 1 while i <= len(file_starts): start = file_starts[-i] if start <= byte_index: return len(file_starts) - i else: ...
Retrieve the highest-indexed file that starts at or before byte_index.
entailment
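Since file_starts is sorted, the linear scan in get_rightmost_index can be replaced by a binary search; a sketch using bisect:

    from bisect import bisect_right

    def get_rightmost_index(byte_index=0, file_starts=(0,)):
        # Index of the last file whose start offset is <= byte_index
        # (yields -1 if byte_index precedes every start).
        return bisect_right(file_starts, byte_index) - 1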
def get_next_want_file(self, byte_index, block): ''' Returns the leftmost file in the user's list of wanted files (want_file_pos). If the first file it finds isn't in the list, it will keep searching until the length of 'block' is exceeded. ''' while block: ri...
Returns the leftmost file in the user's list of wanted files (want_file_pos). If the first file it finds isn't in the list, it will keep searching until the length of 'block' is exceeded.
entailment
def vis_init(self): ''' Sends the state of the BTC at the time the visualizer connects, initializing it. ''' init_dict = {} init_dict['kind'] = 'init' assert len(self.want_file_pos) == len(self.heads_and_tails) init_dict['want_file_pos'] = self.want_file_p...
Sends the state of the BTC at the time the visualizer connects, initializing it.
entailment
def broadcast(self, data_dict): ''' Send to the visualizer (if there is one) or enqueue for later ''' if self.vis_socket: self.queued_messages.append(data_dict) self.send_all_updates()
Send to the visualizer (if there is one) or enqueue for later
entailment
def bencode(canonical): ''' Turns a dictionary into a bencoded str with alphabetized keys e.g., {'spam': 'eggs', 'cow': 'moo'} --> d3:cow3:moo4:spam4:eggse ''' in_dict = dict(canonical) def encode_str(in_str): out_str = str(len(in_str)) + ':' + in_str return out_str ...
Turns a dictionary into a bencoded str with alphabetized keys e.g., {'spam': 'eggs', 'cow': 'moo'} --> d3:cow3:moo4:spam4:eggse
entailment
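A compact sketch of the bencoding rules the docstring example illustrates (strings as len:str, integers as i...e, lists as l...e, dicts as d...e with alphabetized keys); the original's nested helpers are truncated above:

    def bencode(value):
        if isinstance(value, int):
            return 'i%de' % value
        if isinstance(value, str):
            return '%d:%s' % (len(value), value)
        if isinstance(value, list):
            return 'l%se' % ''.join(bencode(v) for v in value)
        if isinstance(value, dict):
            # Keys must be emitted in sorted (alphabetized) order.
            pairs = sorted(value.items())
            return 'd%se' % ''.join(bencode(k) + bencode(v) for k, v in pairs)
        raise TypeError('cannot bencode %r' % type(value))

    # bencode({'spam': 'eggs', 'cow': 'moo'}) -> 'd3:cow3:moo4:spam4:eggse'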
def bdecode(bstring): ''' Bdecodes a bencoded string e.g., d3:cow3:moo4:spam4:eggse -> {'cow': 'moo', 'spam': 'eggs'} ''' def get_val(): i = reader.next() if i.isdigit(): str_len = get_len(i) return get_str(str_len) if i == 'd': re...
Bdecodes a bencoded string e.g., d3:cow3:moo4:spam4:eggse -> {'cow': 'moo', 'spam': 'eggs'}
entailment
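And the inverse, a recursive-descent decoder sketch for the same format (the original's generator-based reader is truncated above):

    def bdecode(bstring):
        def parse(i):
            c = bstring[i]
            if c.isdigit():                    # string: <len>:<chars>
                colon = bstring.index(':', i)
                n = int(bstring[i:colon])
                return bstring[colon + 1:colon + 1 + n], colon + 1 + n
            if c == 'i':                       # integer: i<digits>e
                end = bstring.index('e', i)
                return int(bstring[i + 1:end]), end + 1
            if c in 'ld':                      # list l...e or dict d...e
                items, i = [], i + 1
                while bstring[i] != 'e':
                    item, i = parse(i)
                    items.append(item)
                if c == 'l':
                    return items, i + 1
                return dict(zip(items[::2], items[1::2])), i + 1
            raise ValueError('bad bencode at index %d' % i)
        return parse(0)[0]

    # bdecode('d3:cow3:moo4:spam4:eggse') -> {'cow': 'moo', 'spam': 'eggs'}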
def build_payload(self): ''' Builds the payload that will be sent in tracker_request ''' payload = {} hashed_info = hashlib.sha1(tparser.bencode(self.torrent_dict['info'])) self.hash_string = hashed_info.digest() self.peer_id = ('-DR' + VERSION + ...
Builds the payload that will be sent in tracker_request
entailment
def tracker_request(self): ''' Sends the initial request to the tracker, compiling list of all peers announcing to the tracker ''' assert self.torrent_dict['info'] payload = self.build_payload() if self.torrent_dict['announce'].startswith('udp'): rai...
Sends the initial request to the tracker, compiling list of all peers announcing to the tracker
entailment
def get_peer_ips(self): ''' Generates list of peer IPs from tracker response. Note: not all of these IPs might be good, which is why we only init peer objects for the subset that respond to handshake ''' presponse = [ord(i) for i in self.tracker_response['peers']] ...
Generates list of peer IPs from tracker response. Note: not all of these IPs might be good, which is why we only init peer objects for the subset that respond to handshake
entailment
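The tracker's compact peer list packs each peer into six bytes: four for the IPv4 address and two for a big-endian port. A Python 3 sketch of the unpacking (the snippet above uses Python 2's ord-per-character approach):

    import struct

    def parse_compact_peers(blob):
        peers = []
        for i in range(0, len(blob), 6):
            ip = '.'.join(str(b) for b in blob[i:i + 4])      # 4 address bytes
            port, = struct.unpack('!H', blob[i + 4:i + 6])    # big-endian port
            peers.append((ip, port))
        return peers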
def handshake_peers(self): ''' pstrlen = length of pstr as one byte pstr = BitTorrent protocol reserved = chr(0)*8 info_hash = 20-byte hash above (aka self.hash_string) peer_id = 20-byte string ''' pstr = 'BitTorrent protocol' pstrlen = len(pstr) ...
pstrlen = length of pstr as one byte pstr = BitTorrent protocol reserved = chr(0)*8 info_hash = 20-byte hash above (aka self.hash_string) peer_id = 20-byte string
entailment
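The docstring above spells out the handshake layout; assembled, it is a fixed 68-byte message. A sketch:

    import struct

    def build_handshake(info_hash, peer_id):
        # <pstrlen=19><pstr><reserved: 8 zero bytes><info_hash: 20><peer_id: 20>
        pstr = b'BitTorrent protocol'
        return struct.pack('!B', len(pstr)) + pstr + b'\x00' * 8 + info_hash + peer_id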
def initpeer(self, sock): ''' Creates a new peer object for a valid socket and adds it to reactor's listen list ''' location_json = requests.request("GET", "http://freegeoip.net/json/" + sock.getpeername()[0]).content location = j...
Creates a new peer object for a valid socket and adds it to reactor's listen list
entailment
def read(self): try: bytes = self.sock.recv(self.max_size) except: self.torrent.kill_peer(self) return ''' Chain of events: - process_input - check save_state and read length, id, and message accordingly - if we ...
Chain of events: - process_input - check save_state and read length, id, and message accordingly - if we have a piece (really a block), we piece.save it out inside call to ppiece - If we've completed a piece we: - Tell...
entailment
def ppiece(self, content): ''' Process a piece that we've received from a peer, writing it out to one or more files ''' piece_index, byte_begin = struct.unpack('!ii', content[0:8]) # TODO -- figure out a better way to catch this error. # How is piece_index gettin...
Process a piece that we've received from a peer, writing it out to one or more files
entailment
def is_connected(self): """ Returns the connection status of the data store. Returns: bool: ``True`` if the data store is connected to the MongoDB server. """ if self._client is not None: try: self._client.server_info() except Connecti...
Returns the connection status of the data store. Returns: bool: ``True`` if the data store is connected to the MongoDB server.
entailment
def connect(self): """ Establishes a connection to the MongoDB server. Use the MongoProxy library in order to automatically handle AutoReconnect exceptions in a graceful and reliable way. """ mongodb_args = { 'host': self.host, 'port': self.port, ...
Establishes a connection to the MongoDB server. Use the MongoProxy library in order to automatically handle AutoReconnect exceptions in a graceful and reliable way.
entailment
def exists(self, workflow_id): """ Checks whether a document with the specified workflow id already exists. Args: workflow_id (str): The workflow id that should be checked. Raises: DataStoreNotConnected: If the data store is not connected to the server. Returns...
Checks whether a document with the specified workflow id already exists. Args: workflow_id (str): The workflow id that should be checked. Raises: DataStoreNotConnected: If the data store is not connected to the server. Returns: bool: ``True`` if a document ...
entailment
def add(self, payload=None): """ Adds a new document to the data store and returns its id. Args: payload (dict): Dictionary of initial data that should be stored in the new document in the meta section. Raises: DataStoreNotConnected: If the data store is...
Adds a new document to the data store and returns its id. Args: payload (dict): Dictionary of initial data that should be stored in the new document in the meta section. Raises: DataStoreNotConnected: If the data store is not connected to the server. Re...
entailment
def remove(self, workflow_id): """ Removes a document specified by its id from the data store. All associated GridFs documents are deleted as well. Args: workflow_id (str): The id of the document that represents a workflow run. Raises: DataStoreNotConnected: If...
Removes a document specified by its id from the data store. All associated GridFs documents are deleted as well. Args: workflow_id (str): The id of the document that represents a workflow run. Raises: DataStoreNotConnected: If the data store is not connected to the ser...
entailment
def get(self, workflow_id): """ Returns the document for the given workflow id. Args: workflow_id (str): The id of the document that represents a workflow run. Raises: DataStoreNotConnected: If the data store is not connected to the server. Returns: ...
Returns the document for the given workflow id. Args: workflow_id (str): The id of the document that represents a workflow run. Raises: DataStoreNotConnected: If the data store is not connected to the server. Returns: DataStoreDocument: The document for the...
entailment
def get(self, key, default=None, *, section=DataStoreDocumentSection.Data): """ Return the field specified by its key from the specified section. This method accesses the specified section of the workflow document and returns the value for the given key. Args: key (str): The ...
Return the field specified by its key from the specified section. This method accesses the specified section of the workflow document and returns the value for the given key. Args: key (str): The key pointing to the value that should be retrieved. It supports MongoDB'...
entailment
def set(self, key, value, *, section=DataStoreDocumentSection.Data): """ Store a value under the specified key in the given section of the document. This method stores a value into the specified section of the workflow data store document. Any existing value is overridden. Before storing a valu...
Store a value under the specified key in the given section of the document. This method stores a value into the specified section of the workflow data store document. Any existing value is overridden. Before storing a value, any linked GridFS document under the specified key is deleted. ...
entailment
def push(self, key, value, *, section=DataStoreDocumentSection.Data): """ Appends a value to a list in the specified section of the document. Args: key (str): The key pointing to the value that should be stored/updated. It supports MongoDB's dot notation for nested fields. ...
Appends a value to a list in the specified section of the document. Args: key (str): The key pointing to the value that should be stored/updated. It supports MongoDB's dot notation for nested fields. value: The value that should be appended to a list in the data store. ...
entailment
def extend(self, key, values, *, section=DataStoreDocumentSection.Data): """ Extends a list in the data store with the elements of values. Args: key (str): The key pointing to the value that should be stored/updated. It supports MongoDB's dot notation for nested fields. ...
Extends a list in the data store with the elements of values. Args: key (str): The key pointing to the value that should be stored/updated. It supports MongoDB's dot notation for nested fields. values (list): A list of the values that should be used to extend the list ...
entailment
def _data_from_dotnotation(self, key, default=None): """ Returns the MongoDB data from a key using dot notation. Args: key (str): The key to the field in the workflow document. Supports MongoDB's dot notation for embedded fields. default (object): The default val...
Returns the MongoDB data from a key using dot notation. Args: key (str): The key to the field in the workflow document. Supports MongoDB's dot notation for embedded fields. default (object): The default value that is returned if the key does not exist. ...
entailment
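Dot-notation lookup on a nested document reduces to walking one key segment at a time; a minimal sketch, assuming the document behaves like nested dicts:

    from functools import reduce

    def data_from_dotnotation(document, key, default=None):
        # 'meta.user.name' -> document['meta']['user']['name']
        try:
            return reduce(lambda d, k: d[k], key.split('.'), document)
        except (KeyError, TypeError):
            return default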
def _encode_value(self, value): """ Encodes the value such that it can be stored into MongoDB. Any primitive types are stored directly into MongoDB, while non-primitive types are pickled and stored as GridFS objects. The id pointing to a GridFS object replaces the original value. ...
Encodes the value such that it can be stored into MongoDB. Any primitive types are stored directly into MongoDB, while non-primitive types are pickled and stored as GridFS objects. The id pointing to a GridFS object replaces the original value. Args: value (object): The obj...
entailment
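A sketch of the encoding rule described above, assuming a gridfs.GridFS handle named grid_fs: containers are walked recursively, and anything non-primitive is pickled into GridFS and replaced by the returned ObjectId.

    import pickle

    PRIMITIVES = (bool, int, float, str, type(None))

    def encode_value(value, grid_fs):
        if isinstance(value, PRIMITIVES):
            return value
        if isinstance(value, list):
            return [encode_value(v, grid_fs) for v in value]
        if isinstance(value, dict):
            return {k: encode_value(v, grid_fs) for k, v in value.items()}
        # GridFS.put() stores the blob and returns its ObjectId.
        return grid_fs.put(pickle.dumps(value))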
def _decode_value(self, value): """ Decodes the value by turning any binary data back into Python objects. The method searches for ObjectId values, loads the associated binary data from GridFS and returns the decoded Python object. Args: value (object): The value that shoul...
Decodes the value by turning any binary data back into Python objects. The method searches for ObjectId values, loads the associated binary data from GridFS and returns the decoded Python object. Args: value (object): The value that should be decoded. Raises: D...
entailment
def _delete_gridfs_data(self, data): """ Delete all GridFS data that is linked by fields in the specified data. Args: data: The data that is parsed for MongoDB ObjectIDs. The linked GridFs object for any ObjectID is deleted. """ if isinstance(data, ObjectId):...
Delete all GridFS data that is linked by fields in the specified data. Args: data: The data that is parsed for MongoDB ObjectIDs. The linked GridFs object for any ObjectID is deleted.
entailment
def get_homecall(callsign): """Strips off country prefixes (HC2/DH1TW) and activity suffixes (DH1TW/P). Args: callsign (str): Amateur Radio callsign Returns: str: callsign without country/activity pre/suffixes Raises: ValueError: No callsign found i...
Strips off country prefixes (HC2/DH1TW) and activity suffixes (DH1TW/P). Args: callsign (str): Amateur Radio callsign Returns: str: callsign without country/activity pre/suffixes Raises: ValueError: No callsign found in string Example: ...
entailment
def _iterate_prefix(self, callsign, timestamp=timestamp_now): """Truncate the callsign until it corresponds to a prefix in the database""" prefix = callsign if re.search('(VK|AX|VI)9[A-Z]{3}', callsign): #special rule for VK9 calls if timestamp > datetime(2006,1,1, tzinfo=UTC): ...
Truncate the callsign until it corresponds to a prefix in the database
entailment
def _dismantle_callsign(self, callsign, timestamp=timestamp_now): """ Try to identify the callsign by analyzing it in the following order: Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Raises: ...
Try to identify the callsign by analyzing it in the following order: Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Raises: KeyError: Callsign could not be identified
entailment
def get_all(self, callsign, timestamp=timestamp_now): """ Look up a callsign and return all data available from the underlying database Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: dic...
Look up a callsign and return all data available from the underlying database Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: dict: Dictionary containing the callsign specific data Raise...
entailment
def is_valid_callsign(self, callsign, timestamp=timestamp_now): """ Checks if a callsign is valid Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: bool: True / False Example: ...
Checks if a callsign is valid Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: bool: True / False Example: The following checks if "DH1TW" is a valid callsign >>...
entailment
def get_lat_long(self, callsign, timestamp=timestamp_now): """ Returns Latitude and Longitude for a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: dict: Containing Latitude and...
Returns Latitude and Longitude for a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: dict: Containing Latitude and Longitude Raises: KeyError: No data found for cal...
entailment
def get_cqz(self, callsign, timestamp=timestamp_now): """ Returns CQ Zone of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the callsign's CQ Zone Ra...
Returns CQ Zone of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the callsign's CQ Zone Raises: KeyError: no CQ Zone found for callsign
entailment
def get_ituz(self, callsign, timestamp=timestamp_now): """ Returns ITU Zone of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the callsign's ITU Zone ...
Returns ITU Zone of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the callsign's ITU Zone Raises: KeyError: No ITU Zone found for callsign ...
entailment
def get_country_name(self, callsign, timestamp=timestamp_now): """ Returns the country name where the callsign is located Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: str: name of the...
Returns the country name where the callsign is located Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: str: name of the Country Raises: KeyError: No Country found for callsi...
entailment
def get_adif_id(self, callsign, timestamp=timestamp_now): """ Returns ADIF id of a callsign's country Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the country ADIF id ...
Returns ADIF id of a callsign's country Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the country ADIF id Raises: KeyError: No Country found for callsign
entailment
def get_continent(self, callsign, timestamp=timestamp_now): """ Returns the continent identifier of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: str: continent identifier ...
Returns the continent identifier of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: str: continent identifier Raises: KeyError: No Continent found for callsign ...
entailment
def find_indices(lst, element): """ Returns the indices for all occurrences of 'element' in 'lst'. Args: lst (list): List to search. element: Element to find. Returns: list: List of indices """ result = [] offset = -1 while True: try: ...
Returns the indices for all occurrences of 'element' in 'lst'. Args: lst (list): List to search. element: Element to find. Returns: list: List of indices
entailment
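The index/offset loop shown above has a one-pass equivalent; a sketch:

    def find_indices(lst, element):
        # enumerate avoids repeated list.index() calls and the
        # ValueError that terminates the original's while-loop.
        return [i for i, value in enumerate(lst) if value == element]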
def from_name(cls, name, *, queue=DefaultJobQueueName.Workflow, clear_data_store=True, arguments=None): """ Create a workflow object from a workflow script. Args: name (str): The name of the workflow script. queue (str): Name of the queue the workflow should be...
Create a workflow object from a workflow script. Args: name (str): The name of the workflow script. queue (str): Name of the queue the workflow should be scheduled to. clear_data_store (bool): Remove any documents created during the workflow ...
entailment
def load(self, name, *, arguments=None, validate_arguments=True, strict_dag=False): """ Import the workflow script and load all known objects. The workflow script is treated like a module and imported into the Python namespace. After the import, the method looks for instances of known c...
Import the workflow script and load all known objects. The workflow script is treated like a module and imported into the Python namespace. After the import, the method looks for instances of known classes and stores a reference for further use in the workflow object. Args: ...
entailment
def run(self, config, data_store, signal_server, workflow_id): """ Run all autostart dags in the workflow. Only the dags that are flagged as autostart are started. Args: config (Config): Reference to the configuration object from which the settings for ...
Run all autostart dags in the workflow. Only the dags that are flagged as autostart are started. Args: config (Config): Reference to the configuration object from which the settings for the workflow are retrieved. data_store (DataStore): A DataStore...
entailment
def _queue_dag(self, name, *, data=None): """ Add a new dag to the queue. If the stop workflow flag is set, no new dag can be queued. Args: name (str): The name of the dag that should be queued. data (MultiTaskData): The data that should be passed on to the new dag. ...
Add a new dag to the queue. If the stop workflow flag is set, no new dag can be queued. Args: name (str): The name of the dag that should be queued. data (MultiTaskData): The data that should be passed on to the new dag. Raises: DagNameUnknown: If the speci...
entailment
def _handle_request(self, request): """ Handle an incoming request by forwarding it to the appropriate method. Args: request (Request): Reference to a request object containing the incoming request. Raises: RequestActionUnknown: If the act...
Handle an incoming request by forwarding it to the appropriate method. Args: request (Request): Reference to a request object containing the incoming request. Raises: RequestActionUnknown: If the action specified in the request is not known. ...
entailment
def _handle_start_dag(self, request): """ The handler for the start_dag request. The start_dag request creates a new dag and adds it to the queue. Args: request (Request): Reference to a request object containing the incoming request. The payload has ...
The handler for the start_dag request. The start_dag request creates a new dag and adds it to the queue. Args: request (Request): Reference to a request object containing the incoming request. The payload has to contain the foll...
entailment
def _handle_stop_workflow(self, request): """ The handler for the stop_workflow request. The stop_workflow request adds all running dags to the list of dags that should be stopped and prevents new dags from being started. The dags will then stop queueing new tasks, which will terminate ...
The handler for the stop_workflow request. The stop_workflow request adds all running dags to the list of dags that should be stopped and prevents new dags from being started. The dags will then stop queueing new tasks, which will terminate the dags and in turn the workflow. Ar...
entailment
def _handle_join_dags(self, request): """ The handler for the join_dags request. If dag names are given in the payload only return a valid Response if none of the dags specified by the names are running anymore. If no dag names are given, wait for all dags except one, which by design is...
The handler for the join_dags request. If dag names are given in the payload only return a valid Response if none of the dags specified by the names are running anymore. If no dag names are given, wait for all dags except one, which by design is the one that issued the request, to be fi...
entailment
def _handle_stop_dag(self, request): """ The handler for the stop_dag request. The stop_dag request adds a dag to the list of dags that should be stopped. The dag will then stop queueing new tasks and will eventually stop running. Args: request (Request): Reference to a req...
The handler for the stop_dag request. The stop_dag request adds a dag to the list of dags that should be stopped. The dag will then stop queueing new tasks and will eventually stop running. Args: request (Request): Reference to a request object containing the ...
entailment
def _handle_is_dag_stopped(self, request): """ The handler for the dag_stopped request. The dag_stopped request checks whether a dag is flagged to be terminated. Args: request (Request): Reference to a request object containing the incoming request. T...
The handler for the dag_stopped request. The dag_stopped request checks whether a dag is flagged to be terminated. Args: request (Request): Reference to a request object containing the incoming request. The payload has to contain the ...
entailment
def stop(self, consumer): """ This function is called when the worker receives a request to terminate. Upon the termination of the worker, the workflows for all running jobs are stopped gracefully. Args: consumer (Consumer): Reference to the consumer object that handles mes...
This function is called when the worker receives a request to terminate. Upon the termination of the worker, the workflows for all running jobs are stopped gracefully. Args: consumer (Consumer): Reference to the consumer object that handles messages ...
entailment
def start_dag(self, dag, *, data=None): """ Schedule the execution of a dag by sending a signal to the workflow. Args: dag (Dag, str): The dag object or the name of the dag that should be started. data (MultiTaskData): The data that should be passed on to the new dag. R...
Schedule the execution of a dag by sending a signal to the workflow. Args: dag (Dag, str): The dag object or the name of the dag that should be started. data (MultiTaskData): The data that should be passed on to the new dag. Returns: str: The name of the successfull...
entailment
def join_dags(self, names=None): """ Wait for the specified dags to terminate. This function blocks until the specified dags terminate. If no dags are specified wait for all dags of the workflow, except the dag of the task calling this signal, to terminate. Args: na...
Wait for the specified dags to terminate. This function blocks until the specified dags terminate. If no dags are specified wait for all dags of the workflow, except the dag of the task calling this signal, to terminate. Args: names (list): The names of the dags that have t...
entailment
def stop_dag(self, name=None): """ Send a stop signal to the specified dag or the dag that hosts this task. Args: name (str): The name of the dag that should be stopped. If no name is given the dag that hosts this task is stopped. Upon receiving the stop signal,...
Send a stop signal to the specified dag or the dag that hosts this task. Args: name (str): The name of the dag that should be stopped. If no name is given the dag that hosts this task is stopped. Upon receiving the stop signal, the dag will not queue any new tasks and w...
entailment
def is_stopped(self): """ Check whether the task received a stop signal from the workflow. Tasks can use the stop flag to gracefully terminate their work. This is particularly important for long running tasks and tasks that employ an infinite loop, such as trigger tasks. Return...
Check whether the task received a stop signal from the workflow. Tasks can use the stop flag to gracefully terminate their work. This is particularly important for long running tasks and tasks that employ an infinite loop, such as trigger tasks. Returns: bool: True if the t...
entailment
def event_stream(app, *, filter_by_prefix=None): """ Generator function that returns celery events. This function turns the callback based celery event handling into a generator. Args: app: Reference to a celery application object. filter_by_prefix (str): If not None, only allow events tha...
Generator function that returns celery events. This function turns the callback based celery event handling into a generator. Args: app: Reference to a celery application object. filter_by_prefix (str): If not None, only allow events that have a type that start...
entailment
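Turning a callback-based event API into a generator usually means pumping events through a queue from a worker thread. A generic sketch under that assumption, where subscribe is a hypothetical stand-in for celery's capture loop, not lightflow's actual code:

    import queue
    import threading

    def event_stream(subscribe, *, filter_by_prefix=None):
        events = queue.Queue()
        # subscribe(callback) is assumed to invoke callback(event) for each event.
        threading.Thread(target=subscribe, args=(events.put,), daemon=True).start()
        while True:
            event = events.get()
            if filter_by_prefix is None or event.get('type', '').startswith(filter_by_prefix):
                yield event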
def create_event_model(event): """ Factory function that turns a celery event into an event object. Args: event (dict): A dictionary that represents a celery event. Returns: object: An event object representing the received event. Raises: JobEventTypeUnsupported: If an unsuppo...
Factory function that turns a celery event into an event object. Args: event (dict): A dictionary that represents a celery event. Returns: object: An event object representing the received event. Raises: JobEventTypeUnsupported: If an unsupported celery job event was received. ...
entailment
def config_required(f): """ Decorator that checks whether a configuration file was set. """ def new_func(obj, *args, **kwargs): if 'config' not in obj: click.echo(_style(obj.get('show_color', False), 'Could not find a valid configuration file!', ...
Decorator that checks whether a configuration file was set.
entailment
def ingest_config_obj(ctx, *, silent=True): """ Ingest the configuration object into the click context. """ try: ctx.obj['config'] = Config.from_file(ctx.obj['config_path']) except ConfigLoadError as err: click.echo(_style(ctx.obj['show_color'], str(err), fg='red', bold=True)) if not...
Ingest the configuration object into the click context.
entailment
def cli(ctx, config, no_color): """ Command line client for lightflow. A lightweight, high performance pipeline system for synchrotrons. Lightflow is being developed at the Australian Synchrotron. """ ctx.obj = { 'show_color': not no_color if no_color is not None else True, 'config_...
Command line client for lightflow. A lightweight, high performance pipeline system for synchrotrons. Lightflow is being developed at the Australian Synchrotron.
entailment
def config_default(dest): """ Create a default configuration file. \b DEST: Path or file name for the configuration file. """ conf_path = Path(dest).resolve() if conf_path.is_dir(): conf_path = conf_path / LIGHTFLOW_CONFIG_NAME conf_path.write_text(Config.default()) click.echo(...
Create a default configuration file. \b DEST: Path or file name for the configuration file.
entailment
def config_list(ctx): """ List the current configuration. """ ingest_config_obj(ctx, silent=False) click.echo(json.dumps(ctx.obj['config'].to_dict(), indent=4))
List the current configuration.
entailment
def config_examples(dest, user_dir): """ Copy the example workflows to a directory. \b DEST: Path to which the examples should be copied. """ examples_path = Path(lightflow.__file__).parents[1] / 'examples' if examples_path.exists(): dest_path = Path(dest).resolve() if not user_...
Copy the example workflows to a directory. \b DEST: Path to which the examples should be copied.
entailment
def workflow_list(obj): """ List all available workflows. """ try: for wf in list_workflows(config=obj['config']): click.echo('{:23} {}'.format( _style(obj['show_color'], wf.name, bold=True), wf.docstring.split('\n')[0] if wf.docstring is not None else '')) ...
List all available workflows.
entailment
def workflow_start(obj, queue, keep_data, name, workflow_args): """ Send a workflow to the queue. \b NAME: The name of the workflow that should be started. WORKFLOW_ARGS: Workflow arguments in the form key1=value1 key2=value2. """ try: start_workflow(name=name, co...
Send a workflow to the queue. \b NAME: The name of the workflow that should be started. WORKFLOW_ARGS: Workflow arguments in the form key1=value1 key2=value2.
entailment
def workflow_stop(obj, names): """ Stop one or more running workflows. \b NAMES: The names, ids or job ids of the workflows that should be stopped. Leave empty to stop all running workflows. """ if len(names) == 0: msg = 'Would you like to stop all workflows?' else: m...
Stop one or more running workflows. \b NAMES: The names, ids or job ids of the workflows that should be stopped. Leave empty to stop all running workflows.
entailment
def workflow_status(obj, details): """ Show the status of the workflows. """ show_colors = obj['show_color'] config_cli = obj['config'].cli if details: temp_form = '{:>{}} {:20} {:25} {:25} {:38} {}' else: temp_form = '{:>{}} {:20} {:25} {} {} {}' click.echo('\n') click.e...
Show the status of the workflows.
entailment
def worker_start(obj, queues, name, celery_args): """ Start a worker process. \b CELERY_ARGS: Additional Celery worker command line arguments. """ try: start_worker(queues=queues.split(','), config=obj['config'], name=name, cele...
Start a worker process. \b CELERY_ARGS: Additional Celery worker command line arguments.
entailment
def worker_stop(obj, worker_ids): """ Stop running workers. \b WORKER_IDS: The IDs of the worker that should be stopped or none to stop them all. """ if len(worker_ids) == 0: msg = 'Would you like to stop all workers?' else: msg = '\n{}\n\n{}'.format('\n'.join(worker_ids), ...
Stop running workers. \b WORKER_IDS: The IDs of the worker that should be stopped or none to stop them all.
entailment
def worker_status(obj, filter_queues, details): """ Show the status of all running workers. """ show_colors = obj['show_color'] f_queues = filter_queues.split(',') if filter_queues is not None else None workers = list_workers(config=obj['config'], filter_by_queues=f_queues) if len(workers) == 0: ...
Show the status of all running workers.
entailment
def monitor(ctx, details): """ Show the worker and workflow event stream. """ ingest_config_obj(ctx, silent=False) show_colors = ctx.obj['show_color'] event_display = { JobEventName.Started: {'color': 'blue', 'label': 'started'}, JobEventName.Succeeded: {'color': 'green', 'label': 'suc...
Show the worker and workflow event stream.
entailment
def ext(obj, ext_name, ext_args): """ Run an extension by its name. \b EXT_NAME: The name of the extension. EXT_ARGS: Arguments that are passed to the extension. """ try: mod = import_module('lightflow_{}.__main__'.format(ext_name)) mod.main(ext_args) except ImportError as e...
Run an extension by its name. \b EXT_NAME: The name of the extension. EXT_ARGS: Arguments that are passed to the extension.
entailment
def _style(enabled, text, **kwargs): """ Helper function to enable/disable styled output text. Args: enabled (bool): Turn on or off styling. text (string): The string that should be styled. kwargs (dict): Parameters that are passed through to click.style Returns: string: The...
Helper function to enable/disable styled output text. Args: enabled (bool): Turn on or off styling. text (string): The string that should be styled. kwargs (dict): Parameters that are passed through to click.style Returns: string: The input with either the styling applied (enabl...
entailment
def freq_to_band(freq): """Converts a frequency [kHz] into the band and mode according to the IARU bandplan Note: **DEPRECATION NOTICE** This function has been moved to pyhamtools.frequency with PyHamTools 0.4.1 Please don't use this module/function anymore. It will be r...
Converts a frequency [kHz] into the band and mode according to the IARU bandplan Note: **DEPRECATION NOTICE** This function has been moved to pyhamtools.frequency with PyHamTools 0.4.1 Please don't use this module/function anymore. It will be removed soon.
entailment
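For illustration only, band lookup is a range test against a table; the ranges below are abridged IARU Region 1 values returning just the band, not the deprecated function's actual table or its band-and-mode result:

    BANDS_KHZ = [
        (1810, 2000, 160),
        (3500, 3800, 80),
        (7000, 7200, 40),
        (14000, 14350, 20),
        (21000, 21450, 15),
        (28000, 29700, 10),
    ]

    def freq_to_band(freq):
        # freq in kHz, as in the deprecated helper above.
        for lower, upper, band in BANDS_KHZ:
            if lower <= freq <= upper:
                return band
        raise KeyError('frequency %s kHz not in any known band' % freq)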