From da4a24d79f58f69ebb780a0364a4ce2370a2cd22 Mon Sep 17 00:00:00 2001 From: hackrush Date: Sat, 24 Feb 2018 23:43:29 +0530 Subject: [PATCH 1/5] Added scripts to autogenerate docs and api from docstring Summary of changes Removed single dashed(short args) arguments(possibly breaking changes for app side) Standardised the docstrings Added scripts to autogenerate API and CLI documentation using the docstrings --- CHANGELOG.md | 45 ++ docs/cli.md | 750 ++++++++++++++++++++++--------- docs/index.md | 950 +++++++++++++++++++++++++++++---------- lbrynet/daemon/Daemon.py | 447 +++++++++++------- mkdocs.yml | 2 +- scripts/gen_api_docs.py | 102 +++-- scripts/gen_cli_docs.py | 85 ++++ 7 files changed, 1727 insertions(+), 654 deletions(-) create mode 100644 scripts/gen_cli_docs.py diff --git a/CHANGELOG.md b/CHANGELOG.md index e711777da..9e360faa6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,22 @@ at anytime. ### Fixed * + * improper parsing of arguments to CLI settings_set (https://github.com/lbryio/lbry/issues/930) + * unnecessarily verbose exchange rate error (https://github.com/lbryio/lbry/issues/984) + * value error due to a race condition when saving to the claim cache (https://github.com/lbryio/lbry/issues/1013) + * being unable to re-download updated content (https://github.com/lbryio/lbry/issues/951) + * sending error messages for failed api requests + * file manager startup being slow when handling thousands of files + * handling decryption error for blobs encrypted with an invalid key + * handling stream with no data blob (https://github.com/lbryio/lbry/issues/905) + * fetching the external ip + * `blob_list` returning an error with --uri parameter and incorrectly returning `[]` for streams where blobs are known (https://github.com/lbryio/lbry/issues/895) + * `get` failing with a non-useful error message when given a uri for a channel claim + * exception checking in several wallet unit tests + * daemon not erring properly for non-numeric values being passed to the `bid` parameter for the `publish` method + * `publish` command to allow updating claims with a `bid` amount higher than the wallet balance, so long as the amount is less than the wallet balance plus the bid amount of the claim being updated (https://github.com/lbryio/lbry/issues/748) + * incorrect `blob_num` for the stream terminator blob, which would result in creating invalid streams. Such invalid streams are detected on startup and are automatically removed (https://github.com/lbryio/lbry/issues/1124) + * fixed the inconsistencies in docstrings * ### Deprecated @@ -23,6 +39,24 @@ at anytime. ### Changed * * +### Added + * link to instructions on how to change the default peer port + * `lbrynet-console`, a tool to run or connect to lbrynet-daemon and launch an interactive python console with the api functions built in. + * `--conf` CLI flag to specify an alternate config file + * `peer_port`, `disable_max_key_fee`, `auto_renew_claim_height_delta`, `blockchain_name`, and `lbryum_servers` to configurable settings + * `wallet_unlock` command (available during startup to unlock an encrypted wallet) + * support for wallet encryption via new commands `wallet_decrypt` and `wallet_encrypt` + * `channel_import`, `channel_export`, and `claim_renew` commands + * `blob_availability` and `stream_availability` commands for debugging download issues + * a new startup stage to indicate if the daemon is waiting for the `wallet_unlock` command. 
+ * `abandon_info` dictionary (containing `claim_name`, `claim_id`, `address`, `amount`, `balance_delta` and `nout`) for claims, supports, and updates returned by `transaction_list` + * `permanent_url` string to `channel_list_mine`, `claim_list`, `claim_show`, `resolve` and `resolve_name` (see lbryio/lbryum#203) + * `is_mine` boolean to `channel_list` results + * `txid`, `nout`, `channel_claim_id`, `channel_claim_name`, `status`, `blobs_completed`, and `blobs_in_stream` fields to file objects returned by `file_list` and `get` + * `txid`, `nout`, `channel_claim_id`, and `channel_claim_name` filters for `file` commands (`file_list`, `file_set_status`, `file_reflect`, and `file_delete`) + * unit tests for `SQLiteStorage` and updated old tests for relevant changes (https://github.com/lbryio/lbry/issues/1088) + * scripts to autogenerate documentation + * ### Added * @@ -105,6 +139,17 @@ at anytime. * old storage classes used by the file manager, wallet, and blob manager * old `.db` database files from the data directory + * `seccure` and `gmpy` dependencies + * support for positional arguments in cli `settings_set`. Now only accepts settings changes in the form `--setting_key=value` + * `auto_re_reflect` setting from the conf file, use the `reflect_uploads` setting instead + * `name` argument for `claim_show` command + * `message` response field in file objects returned by `file_list` and `get` + * `include_tip_info` argument from `transaction_list`, which will now always include tip information. + * old and unused UI related code + * unnecessary `TempBlobManager` class + * old storage classes used by the file manager, wallet, and blob manager + * old `.db` database files from the data directory + * short(single dashed) arguments ## [0.18.0] - 2017-11-08 ### Fixed diff --git a/docs/cli.md b/docs/cli.md index 8f0c0ff0f..b4a4c579e 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -6,22 +6,47 @@ Announce blobs to the DHT Usage: - blob_announce [-a] [ | --blob_hash=] + blob_announce [--announce_all] [ | --blob_hash=] [ | --stream_hash=] [ | --sd_hash=] + Options: - -a : announce all the blobs possessed by user - , --blob_hash= : announce a blob, specified by blob_hash - , --stream_hash= : announce all blobs associated with - stream_hash - , --sd_hash= : announce all blobs associated with - sd_hash and the sd_hash itself + --announce_all= : (bool) announce all the blobs possessed by user + --blob_hash= : (str) announce a blob, specified by blob_hash + --stream_hash= : (str) announce all blobs associated with + stream_hash + --sd_hash= : (str) announce all blobs associated with + sd_hash and the sd_hash itself Returns: (bool) true if successful ``` +## blob_availability + +```text +Get blob availability + +Usage: + blob_availability () [ | --search_timeout=] + [ | --blob_timeout=] + + +Options: + --blob_hash= : (str) check availability for this blob hash + --search_timeout= : (int) how long to search for peers for the blob + in the dht + --blob_timeout= : (int) how long to try downloading from a peer + +Returns: + (dict) { + "is_available": + "reachable_peers": [":"], + "unreachable_peers": [":"] + } +``` + ## blob_delete ```text @@ -30,6 +55,10 @@ Delete a blob Usage: blob_delete ( | --blob_hash= : (str) blob hash of the blob to delete + Returns: (str) Success/fail message ``` @@ -43,18 +72,20 @@ Usage: blob_get ( | --blob_hash=) [--timeout=] [--encoding=] [--payment_rate_manager=] -Options: ---timeout= : timeout in number of seconds ---encoding= : by default no attempt at decoding is made, - can be set 
to one of the - following decoders: - 'json' ---payment_rate_manager= : if not given the default payment rate - manager will be used. - supported alternative rate managers: - 'only-free' -Returns +Options: + --blob_hash= : (str) blob hash of the blob to get + --timeout= : (int) timeout in number of seconds + --encoding= : (str) by default no attempt at decoding + is made, can be set to one of the + following decoders: + 'json' + --payment_rate_manager= : (str) if not given the default payment rate + manager will be used. + supported alternative rate managers: + 'only-free' + +Returns: (str) Success/Fail message or (dict) decoded data ``` @@ -64,18 +95,21 @@ Returns Returns blob hashes. If not given filters, returns all blobs known by the blob manager Usage: - blob_list [-n] [-f] [ | --uri=] [ | --stream_hash=] - [ | --sd_hash=] [ | --page_size=] + blob_list [--needed] [--finished] [ | --uri=] + [ | --stream_hash=] + [ | --sd_hash=] + [ | --page_size=] [ | --page=] + Options: - -n : only return needed blobs - -f : only return finished blobs - , --uri= : filter blobs by stream in a uri - , --stream_hash= : filter blobs by stream hash - , --sd_hash= : filter blobs by sd hash - , --page_size= : results page size - , --page= : page of results to return + --needed : (bool) only return needed blobs + --finished : (bool) only return finished blobs + --uri= : (str) filter blobs by stream in a uri + --stream_hash= : (str) filter blobs by stream hash + --sd_hash= : (str) filter blobs by sd hash + --page_size= : (int) results page size + --page= : (int) page of results to return Returns: (list) List of blob hashes @@ -89,6 +123,10 @@ Reflects all saved blobs Usage: blob_reflect_all + +Options: + None + Returns: (bool) true if successful ``` @@ -101,9 +139,10 @@ Get contents of a block Usage: block_show ( | --blockhash=) | ( | --height=) + Options: - , --blockhash= : hash of the block to look up - , --height= : height of the block to look up + --blockhash= : (str) hash of the block to look up + --height= : (int) height of the block to look up Returns: (dict) Requested block @@ -117,6 +156,10 @@ Export serialized channel signing information for a given certificate claim id Usage: channel_export ( | --claim_id=) + +Options: + --claim_id= : (str) Claim ID to export information about + Returns: (str) Serialized certificate information ``` @@ -130,6 +173,10 @@ Usage: channel_import ( | --serialized_certificate_info=) + +Options: + --serialized_certificate_info= : (str) certificate info + Returns: (dict) Result dictionary ``` @@ -142,6 +189,10 @@ Get certificate claim infos for channels that can be published to Usage: channel_list + +Options: + None + Returns: (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim is in the wallet. 
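
The blob and channel commands documented above (for example `blob_list` and `channel_list`) are reachable over the daemon's JSON-RPC interface as well as through the CLI. The following is a minimal illustrative sketch only: it assumes a locally running daemon exposing its API at the default `http://localhost:5279/lbryapi` endpoint and accepting parameters as a keyword dictionary that mirrors the long-form `--option=<value>` flags; adjust the URL if your configuration differs.

```python
# Minimal sketch, not a reference client. Assumes a locally running lbrynet
# daemon on the default API endpoint and keyword-style JSON-RPC params that
# mirror the long-form CLI options documented above.
import json
import urllib.request

API_URL = "http://localhost:5279/lbryapi"  # assumed default; adjust to your config


def api_call(method, **params):
    """POST a JSON-RPC request to the daemon and return its 'result' field."""
    payload = json.dumps({"method": method, "params": params}).encode()
    request = urllib.request.Request(
        API_URL, data=payload, headers={"Content-Type": "application/json"}
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read().decode())["result"]


if __name__ == "__main__":
    # Certificate claims that can be published to (see `channel_list` above).
    print(api_call("channel_list"))
    # Only the blobs that are still needed (see `blob_list` with --needed).
    print(api_call("blob_list", needed=True))
```

If the daemon follows this convention, the same pattern should apply to every command in this file: the method name matches the section heading and the keyword arguments match the documented options.
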
@@ -156,6 +207,11 @@ Usage: channel_new ( | --channel_name=) ( | --amount=) + +Options: + --channel_name= : (str) name of the channel prefixed with '@' + --amount= : (float) bid amount on the channel + Returns: (dict) Dictionary containing result of the claim { @@ -176,7 +232,13 @@ Usage: claim_abandon [ | --claim_id=] [ | --txid=] [ | --nout=] -Return: + +Options: + --claim_id= : (str) claim_id of the claim to abandon + --txid= : (str) txid of the claim to abandon + --nout= : (int) nout of the claim to abandon + +Returns: (dict) Dictionary containing result of the claim { txid : (str) txid of resulting transaction @@ -192,7 +254,11 @@ List current claims and information about them for a given name Usage: claim_list ( | --name=) -Returns + +Options: + --name= : (str) name of the claim to list info about + +Returns: (dict) State of claims assigned for the name { 'claims': (list) list of claims for the name @@ -205,6 +271,7 @@ Returns 'height': (int) height of block containing the claim 'txid': (str) txid of the claim 'nout': (int) nout of the claim + 'permanent_url': (str) permanent url of the claim, 'supports': (list) a list of supports attached to the claim 'value': (str) the value of the claim }, @@ -223,10 +290,13 @@ Usage: claim_list_by_channel ( | --uri=) [...] [--page=] [--page_size=] + Options: - --page= : which page of results to return where page 1 is the first - page, defaults to no pages - --page_size= : number of results in a page, default of 10 + --uri= : (str) uri of the channel + --uris= : (list) uris of the channel + --page= : (int) which page of results to return where page 1 is the first + page, defaults to no pages + --page_size= : (int) number of results in a page, default of 10 Returns: { @@ -274,7 +344,11 @@ List my name claims Usage: claim_list_mine -Returns + +Options: + None + +Returns: (list) List of name claims owned by user [ { @@ -289,6 +363,7 @@ Returns 'height': (int) height of the block containing the claim 'is_spent': (bool) true if claim is abandoned, false otherwise 'name': (str) name of the claim + 'permanent_url': (str) permanent url of the claim, 'txid': (str) txid of the cliam 'nout': (int) nout of the claim 'value': (str) value of the claim @@ -305,7 +380,13 @@ Usage: claim_new_support ( | --name=) ( | --claim_id=) ( | --amount=) -Return: + +Options: + --name= : (str) name of the claim to support + --claim_id= : (str) claim_id of the claim to support + --amount= : (float) amount of support + +Returns: (dict) Dictionary containing result of the claim { txid : (str) txid of resulting support claim @@ -322,7 +403,12 @@ Renew claim(s) or support(s) Usage: claim_renew ( | --outpoint=) | ( | --height=) -Return: + +Options: + --outpoint= : (str) outpoint of the claim to renew + --height= : (str) update claims expiring before or at this block height + +Returns: (dict) Dictionary where key is the the original claim's outpoint and value is the result of the renewal { @@ -347,9 +433,22 @@ Usage: (
| --address=
) [ | --amount=] + Options: - : Amount of credits to claim name for, defaults to the current amount - on the claim + --claim_id= : (str) claim_id to send + --address=
: (str) address to send the claim to + --amount : (int) Amount of credits to claim name for, defaults to the current amount + on the claim + +Returns: + (dict) Dictionary containing result of the claim + { + 'tx' : (str) hex encoded transaction + 'txid' : (str) txid of resulting claim + 'nout' : (int) nout of the resulting claim + 'fee' : (float) fee paid for the claim transaction + 'claim_id' : (str) claim ID of the resulting claim + } ``` ## claim_show @@ -361,12 +460,13 @@ Usage: claim_show [ | --txid=] [ | --nout=] [ | --claim_id=] + Options: - , --txid= : look for claim with this txid, nout must - also be specified - , --nout= : look for claim with this nout, txid must - also be specified - , --claim_id= : look for claim with this claim id + --txid= : (str) look for claim with this txid, nout must + also be specified + --nout= : (int) look for claim with this nout, txid must + also be specified + --claim_id= : (str) look for claim with this claim id Returns: (dict) Dictionary containing claim info as below, @@ -393,15 +493,19 @@ Returns: ```text This command is only for testing the CLI argument parsing Usage: - cli_test_command [-a] [-b] ( | --pos_arg=) + cli_test_command [--a_arg] [--b_arg] ( | --pos_arg=) [...] [--pos_arg2=] [--pos_arg3=] + Options: - -a, --a_arg : a arg - -b, --b_arg : b arg - , --pos_arg2= : pos arg 2 - , --pos_arg3= : pos arg 3 + --a_arg : a arg + --b_arg : b arg + --pos_arg= : pos arg + --pos_args= : pos args + --pos_arg2= : pos arg 2 + --pos_arg3= : pos arg 3 + Returns: pos args ``` @@ -414,6 +518,10 @@ Return a list of available commands Usage: commands + +Options: + None + Returns: (list) list of available commands ``` @@ -426,6 +534,10 @@ Stop lbrynet-daemon Usage: daemon_stop + +Options: + None + Returns: (string) Shutdown message ``` @@ -436,19 +548,28 @@ Returns: Delete a LBRY file Usage: - file_delete [-f] [--delete_all] [--sd_hash=] [--file_name=] - [--stream_hash=] [--rowid=] + file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=] [--file_name=] + [--stream_hash=] [--rowid=] [--claim_id=] [--txid=] + [--nout=] [--claim_name=] [--channel_claim_id=] + [--channel_name=] + Options: - -f, --delete_from_download_dir : delete file from download directory, - instead of just deleting blobs - --delete_all : if there are multiple matching files, - allow the deletion of multiple files. - Otherwise do not delete anything. - --sd_hash= : delete by file sd hash - --file_name : delete by file name in downloads folder - --stream_hash= : delete by file stream hash - --rowid= : delete by file row id + --delete_from_download_dir : (bool) delete file from download directory, + instead of just deleting blobs + --delete_all : (bool) if there are multiple matching files, + allow the deletion of multiple files. + Otherwise do not delete anything. 
+ --sd_hash= : (str) delete by file sd hash + --file_name : (str) delete by file name in downloads folder + --stream_hash= : (str) delete by file stream hash + --rowid= : (int) delete by file row id + --claim_id= : (str) delete by file claim id + --txid= : (str) delete by file claim txid + --nout= : (int) delete by file claim nout + --claim_name= : (str) delete by file claim name + --channel_claim_id= : (str) delete by file channel claim id + --channel_name= : (str) delete by file channel claim name Returns: (bool) true if deletion was successful @@ -461,16 +582,26 @@ List files limited by optional filters Usage: file_list [--sd_hash=] [--file_name=] [--stream_hash=] - [--rowid=] - [-f] + [--rowid=] [--claim_id=] [--outpoint=] [--txid=] [--nout=] + [--channel_claim_id=] [--channel_name=] + [--claim_name=] [--full_status] + Options: - --sd_hash= : get file with matching sd hash - --file_name= : get file with matching file name in the - downloads folder - --stream_hash= : get file with matching stream hash - --rowid= : get file with matching row id - -f : full status, populate the 'message' and 'size' fields + --sd_hash= : (str) get file with matching sd hash + --file_name= : (str) get file with matching file name in the + downloads folder + --stream_hash= : (str) get file with matching stream hash + --rowid= : (int) get file with matching row id + --claim_id= : (str) get file with matching claim id + --outpoint= : (str) get file with matching claim outpoint + --txid= : (str) get file with matching claim txid + --nout= : (int) get file with matching claim nout + --channel_claim_id= : (str) get file with matching channel claim id + --channel_name= : (str) get file with matching channel name + --claim_name= : (str) get file with matching claim name + --full_status : (bool) full status, populate the + 'message' and 'size' fields Returns: (list) List of files @@ -489,9 +620,19 @@ Returns: 'download_path': (str) download path of file, 'mime_type': (str) mime type of file, 'key': (str) key attached to file, - 'total_bytes': (int) file size in bytes, None if full_status is false - 'written_bytes': (int) written size in bytes - 'message': (str), None if full_status is false + 'total_bytes': (int) file size in bytes, None if full_status is false, + 'written_bytes': (int) written size in bytes, + 'blobs_completed': (int) num_completed, None if full_status is false, + 'blobs_in_stream': (int) None if full_status is false, + 'status': (str) downloader status, None if full_status is false, + 'claim_id': (str) None if full_status is false or if claim is not found, + 'outpoint': (str) None if full_status is false or if claim is not found, + 'txid': (str) None if full_status is false or if claim is not found, + 'nout': (int) None if full_status is false or if claim is not found, + 'metadata': (dict) None if full_status is false or if claim is not found, + 'channel_claim_id': (str) None if full_status is false or if claim is not found or signed, + 'channel_name': (str) None if full_status is false or if claim is not found or signed, + 'claim_name': (str) None if full_status is false or if claim is not found }, ] ``` @@ -506,14 +647,15 @@ Usage: [--stream_hash=] [--rowid=] [--reflector=] + Options: - --sd_hash= : get file with matching sd hash - --file_name= : get file with matching file name in the - downloads folder - --stream_hash= : get file with matching stream hash - --rowid= : get file with matching row id - --reflector= : reflector server, ip address or url - by default choose a server from the config 
+ --sd_hash= : (str) get file with matching sd hash + --file_name= : (str) get file with matching file name in the + downloads folder + --stream_hash= : (str) get file with matching stream hash + --rowid= : (int) get file with matching row id + --reflector= : (str) reflector server, ip address or url + by default choose a server from the config Returns: (list) list of blobs reflected @@ -525,15 +667,17 @@ Returns: Start or stop downloading a file Usage: - file_set_status [--sd_hash=] [--file_name=] - [--stream_hash=] [--rowid=] + file_set_status ( | --status=) [--sd_hash=] + [--file_name=] [--stream_hash=] [--rowid=] + Options: - --sd_hash= : set status of file with matching sd hash - --file_name= : set status of file with matching file name in the - downloads folder - --stream_hash= : set status of file with matching stream hash - --rowid= : set status of file with matching row id + --status= : (str) one of "start" or "stop" + --sd_hash= : (str) set status of file with matching sd hash + --file_name= : (str) set status of file with matching file name in the + downloads folder + --stream_hash= : (str) set status of file with matching stream hash + --rowid= : (int) set status of file with matching row id Returns: (str) Confirmation message @@ -548,10 +692,11 @@ Usage: get [ | --file_name=] [ | --timeout=] + Options: - : specified name for the downloaded file - : download timeout in number of seconds - : path to directory where file will be saved + --uri= : (str) uri of the content to download + --file_name= : (str) specified name for the downloaded file + --timeout= : (int) download timeout in number of seconds Returns: (dict) Dictionary containing information about the stream @@ -562,39 +707,28 @@ Returns: 'points_paid': (float) credit paid to download file, 'stopped': (bool) true if download is stopped, 'stream_hash': (str) stream hash of file, - 'stream_name': (str) stream name, + 'stream_name': (str) stream name , 'suggested_file_name': (str) suggested file name, 'sd_hash': (str) sd hash of file, - 'name': (str) name claim attached to file - 'outpoint': (str) claim outpoint attached to file - 'claim_id': (str) claim ID attached to file, 'download_path': (str) download path of file, 'mime_type': (str) mime type of file, 'key': (str) key attached to file, - 'total_bytes': (int) file size in bytes, None if full_status is false - 'written_bytes': (int) written size in bytes - 'message': (str), None if full_status is false - 'metadata': (dict) Metadata dictionary + 'total_bytes': (int) file size in bytes, None if full_status is false, + 'written_bytes': (int) written size in bytes, + 'blobs_completed': (int) num_completed, None if full_status is false, + 'blobs_in_stream': (int) None if full_status is false, + 'status': (str) downloader status, None if full_status is false, + 'claim_id': (str) claim id, + 'outpoint': (str) claim outpoint string, + 'txid': (str) claim txid, + 'nout': (int) claim nout, + 'metadata': (dict) claim metadata, + 'channel_claim_id': (str) None if claim is not signed + 'channel_name': (str) None if claim is not signed + 'claim_name': (str) claim name } ``` -## get_availability - -```text -Get stream availability for lbry uri - -Usage: - get_availability ( | --uri=) [ | --sd_timeout=] - [ | --peer_timeout=] - -Options: - , --sd_timeout= : sd blob download timeout - , --peer_timeout= : how long to look for peers - -Returns: - (float) Peers per blob / total blobs -``` - ## help ```text @@ -603,8 +737,12 @@ Return a useful message for an API command Usage: help [ | 
--command=] + Options: - , --command= : command to retrieve documentation for + --command= : (str) command to retrieve documentation for + +Returns: + (str) Help message ``` ## peer_list @@ -615,8 +753,10 @@ Get peers for blob hash Usage: peer_list ( | --blob_hash=) [ | --timeout=] + Options: - , --timeout= : peer search timeout in seconds + --blob_hash= : (str) find available peers for this blob hash + --timeout= : (int) peer search timeout in seconds Returns: (list) List of contacts @@ -649,39 +789,42 @@ Usage: [--channel_name=] [--channel_id=] [--claim_address=] [--change_address=] + Options: - --metadata= : ClaimDict to associate with the claim. - --file_path= : path to file to be associated with name. If provided, - a lbry stream of this file will be used in 'sources'. - If no path is given but a sources dict is provided, - it will be used. If neither are provided, an - error is raised. - --fee= : Dictionary representing key fee to download content: - { - 'currency': currency_symbol, - 'amount': float, - 'address': str, optional - } - supported currencies: LBC, USD, BTC - If an address is not provided a new one will be - automatically generated. Default fee is zero. - --title= : title of the publication - --description=<description> : description of the publication - --author=<author> : author of the publication - --language=<language> : language of the publication - --license=<license> : publication license - --license_url=<license_url> : publication license url - --thumbnail=<thumbnail> : thumbnail url - --preview=<preview> : preview url - --nsfw=<nsfw> : title of the publication - --sources=<sources> : {'lbry_sd_hash':sd_hash} specifies sd hash of file - --channel_name=<channel_name> : name of the publisher channel name in the wallet - --channel_id=<channel_id> : claim id of the publisher channel, does not check - for channel claim being in the wallet. This allows - publishing to a channel where only the certificate - private key is in the wallet. - --claim_address=<claim_address> : address where the claim is sent to, if not specified - new address wil automatically be created + --name=<name> : (str) name of the content + --bid=<bid> : (float) amount to back the claim + --metadata=<metadata> : (dict) ClaimDict to associate with the claim. + --file_path=<file_path> : (str) path to file to be associated with name. If provided, + a lbry stream of this file will be used in 'sources'. + If no path is given but a sources dict is provided, + it will be used. If neither are provided, an + error is raised. + --fee=<fee> : (dict) Dictionary representing key fee to download content: + { + 'currency': currency_symbol, + 'amount': float, + 'address': str, optional + } + supported currencies: LBC, USD, BTC + If an address is not provided a new one will be + automatically generated. Default fee is zero. 
+ --title=<title> : (str) title of the publication + --description=<description> : (str) description of the publication + --author=<author> : (str) author of the publication + --language=<language> : (str) language of the publication + --license=<license> : (str) publication license + --license_url=<license_url> : (str) publication license url + --thumbnail=<thumbnail> : (str) thumbnail url + --preview=<preview> : (str) preview url + --nsfw=<nsfw> : (bool) title of the publication + --sources=<sources> : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file + --channel_name=<channel_name> : (str) name of the publisher channel name in the wallet + --channel_id=<channel_id> : (str) claim id of the publisher channel, does not check + for channel claim being in the wallet. This allows + publishing to a channel where only the certificate + private key is in the wallet. + --claim_address=<claim_address> : (str) address where the claim is sent to, if not specified + new address wil automatically be created Returns: (dict) Dictionary containing result of the claim @@ -702,6 +845,10 @@ Report a bug to slack Usage: report_bug (<message> | --message=<message>) + +Options: + --message=<message> : (str) Description of the bug + Returns: (bool) true if successful ``` @@ -712,10 +859,13 @@ Returns: Resolve given LBRY URIs Usage: - resolve [-f] (<uri> | --uri=<uri>) [<uris>...] + resolve [--force] (<uri> | --uri=<uri>) [<uris>...] + Options: - -f : force refresh and ignore cache + --force : (bool) force refresh and ignore cache + --uri=<uri> : (str) uri to resolve + --uris=<uris> : (list) uris to resolve Returns: Dictionary of results, keyed by uri @@ -735,6 +885,7 @@ Returns: 'depth': (int) claim depth, 'has_signature': (bool) included if decoded_claim 'name': (str) claim name, + 'permanent_url': (str) permanent url of the certificate claim, 'supports: (list) list of supports [{'txid': (str) txid, 'nout': (int) nout, 'amount': (float) amount}], @@ -759,6 +910,7 @@ Returns: 'depth': (int) claim depth, 'has_signature': (bool) included if decoded_claim 'name': (str) claim name, + 'permanent_url': (str) permanent url of the claim, 'channel_name': (str) channel name if claim is in a channel 'supports: (list) list of supports [{'txid': (str) txid, 'nout': (int) nout, @@ -777,10 +929,12 @@ Returns: Resolve stream info from a LBRY name Usage: - resolve_name <name> [-f] + resolve_name (<name> | --name=<name>) [--force] + Options: - -f : force refresh and do not check cache + --name=<name> : (str) the name to resolve + --force : (bool) force refresh and do not check cache Returns: (dict) Metadata dictionary from name claim, None if the name is not @@ -795,6 +949,10 @@ Get DHT routing information Usage: routing_table_get + +Options: + None + Returns: (dict) dictionary containing routing and contact information { @@ -821,6 +979,10 @@ Get daemon settings Usage: settings_get + +Options: + None + Returns: (dict) Dictionary of daemon settings See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings @@ -847,34 +1009,32 @@ Usage: [--sd_download_timeout=<sd_download_timeout>] [--auto_renew_claim_height_delta=<auto_renew_claim_height_delta>] -Options: - --download_directory=<download_directory> : (str) - --data_rate=<data_rate> : (float), 0.0001 - --download_timeout=<download_timeout> : (int), 180 - --peer_port=<peer_port> : (int), 3333 - --max_key_fee=<max_key_fee> : (dict) maximum key fee for downloads, - in the format: { - "currency": <currency_symbol>, - "amount": <amount> - }. 
In the CLI, it must be an escaped - JSON string - Supported currency symbols: - LBC - BTC - USD - --disable_max_key_fee=<disable_max_key_fee> : (bool), False - --use_upnp=<use_upnp> : (bool), True - --run_reflector_server=<run_reflector_server> : (bool), False - --cache_time=<cache_time> : (int), 150 - --reflect_uploads=<reflect_uploads> : (bool), True - --share_usage_data=<share_usage_data> : (bool), True - --peer_search_timeout=<peer_search_timeout> : (int), 3 - --sd_download_timeout=<sd_download_timeout> : (int), 3 - --auto_renew_claim_height_delta=<auto_renew_claim_height_delta> : (int), 0 - claims set to expire within this many blocks will be - automatically renewed after startup (if set to 0, renews - will not be made automatically) +Options: + --download_directory=<download_directory> : (str) path of download directory + --data_rate=<data_rate> : (float) 0.0001 + --download_timeout=<download_timeout> : (int) 180 + --peer_port=<peer_port> : (int) 3333 + --max_key_fee=<max_key_fee> : (dict) maximum key fee for downloads, + in the format: + { + 'currency': <currency_symbol>, + 'amount': <amount> + }. + In the CLI, it must be an escaped JSON string + Supported currency symbols: LBC, USD, BTC + --disable_max_key_fee=<disable_max_key_fee> : (bool) False + --use_upnp=<use_upnp> : (bool) True + --run_reflector_server=<run_reflector_server> : (bool) False + --cache_time=<cache_time> : (int) 150 + --reflect_uploads=<reflect_uploads> : (bool) True + --share_usage_data=<share_usage_data> : (bool) True + --peer_search_timeout=<peer_search_timeout> : (int) 3 + --sd_download_timeout=<sd_download_timeout> : (int) 3 + --auto_renew_claim_height_delta=<auto_renew_claim_height_delta> : (int) 0 + claims set to expire within this many blocks will be + automatically renewed after startup (if set to 0, renews + will not be made automatically) Returns: (dict) Updated dictionary of daemon settings @@ -886,25 +1046,26 @@ Returns: Get daemon status Usage: - status [-s] [-d] + status [--session_status] [--dht_status] + Options: - -s : include session status in results - -d : include dht network and peer status + --session_status : (bool) include session status in results + --dht_status : (bool) include dht network and peer status Returns: (dict) lbrynet-daemon status { - 'lbry_id': lbry peer id, base58 - 'installation_id': installation id, base58 - 'is_running': bool - 'is_first_run': bool + 'lbry_id': lbry peer id, base58, + 'installation_id': installation id, base58, + 'is_running': bool, + 'is_first_run': bool, 'startup_status': { - 'code': status code + 'code': status code, 'message': status message }, 'connection_status': { - 'code': connection status code + 'code': connection status code, 'message': connection status message }, 'blockchain_status': { @@ -912,6 +1073,7 @@ Returns: 'blocks_behind': remote_height - local_height, 'best_blockhash': block hash of most recent block, }, + 'wallet_is_encrypted': bool, If given the session status option: 'session_status': { @@ -925,13 +1087,47 @@ Returns: 'dht_status': { 'kbps_received': current kbps receiving, 'kbps_sent': current kdps being sent, - 'total_bytes_sent': total bytes sent - 'total_bytes_received': total bytes received - 'queries_received': number of queries received per second - 'queries_sent': number of queries sent per second - 'recent_contacts': count of recently contacted peers + 'total_bytes_sent': total bytes sent, + 'total_bytes_received': total bytes received, + 'queries_received': number of queries received per second, + 'queries_sent': 
number of queries sent per second, + 'recent_contacts': count of recently contacted peers, 'unique_contacts': count of unique peers - } + }, + } +``` + +## stream_availability + +```text +Get stream availability for lbry uri + +Usage: + stream_availability (<uri> | --uri=<uri>) + [<search_timeout> | --search_timeout=<search_timeout>] + [<blob_timeout> | --blob_timeout=<blob_timeout>] + + +Options: + --uri=<uri> : (str) check availability for this uri + --search_timeout=<search_timeout> : (int) how long to search for peers for the blob + in the dht + --search_timeout=<blob_timeout> : (int) how long to try downloading from a peer + +Returns: + (dict) { + 'is_available': <bool>, + 'did_decode': <bool>, + 'did_resolve': <bool>, + 'is_stream': <bool>, + 'num_blobs_in_stream': <int>, + 'sd_hash': <str>, + 'sd_blob_availability': <dict> see `blob_availability`, + 'head_blob_hash': <str>, + 'head_blob_availability': <dict> see `blob_availability`, + 'use_upnp': <bool>, + 'upnp_redirect_is_set': <bool>, + 'error': <None> | <str> error message } ``` @@ -941,15 +1137,17 @@ Returns: Get estimated cost for a lbry stream Usage: - stream_cost_estimate <uri> [<size> | --size=<size>] + stream_cost_estimate (<uri> | --uri=<uri>) [<size> | --size=<size>] + Options: - <size>, --size=<size> : stream size in bytes. if provided an sd blob won't be - downloaded. + --uri=<uri> : (str) uri to use + --size=<size> : (float) stream size in bytes. if provided an sd blob won't be + downloaded. Returns: (float) Estimated cost in lbry credits, returns None if uri is not - resolveable + resolvable ``` ## transaction_list @@ -958,35 +1156,54 @@ Returns: List transactions belonging to wallet Usage: - transaction_list [-t] + transaction_list + Options: - -t : Include claim tip information + None Returns: - (list) List of transactions, where is_tip is null by default, - and set to a boolean if include_tip_info is true + (list) List of transactions { - "claim_info": (list) claim info if in txn [{"amount": (float) claim amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout}], + "claim_info": (list) claim info if in txn [{ + "address": (str) address of claim, + "balance_delta": (float) bid amount, + "amount": (float) claim amount, + "claim_id": (str) claim id, + "claim_name": (str) claim name, + "nout": (int) nout + }], + "abandon_info": (list) abandon info if in txn [{ + "address": (str) address of abandoned claim, + "balance_delta": (float) returned amount, + "amount": (float) claim amount, + "claim_id": (str) claim id, + "claim_name": (str) claim name, + "nout": (int) nout + }], "confirmations": (int) number of confirmations for the txn, "date": (str) date and time of txn, "fee": (float) txn fee, - "support_info": (list) support info if in txn [{"amount": (float) support amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "is_tip": (null) default, - (bool) if include_tip_info is true, - "nout": (int) nout}], + "support_info": (list) support info if in txn [{ + "address": (str) address of support, + "balance_delta": (float) support amount, + "amount": (float) support amount, + "claim_id": (str) claim id, + "claim_name": (str) claim name, + "is_tip": (bool), + "nout": (int) nout + }], "timestamp": (int) timestamp, "txid": (str) txn id, - "update_info": (list) update info if in txn [{"amount": (float) updated amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout}], + "update_info": (list) update info if in txn [{ + "address": (str) 
address of claim, + "balance_delta": (float) credited/debited + "amount": (float) absolute amount, + "claim_id": (str) claim id, + "claim_name": (str) claim name, + "nout": (int) nout + }], "value": (float) value of txn } ``` @@ -999,6 +1216,10 @@ Get a decoded transaction from a txid Usage: transaction_show (<txid> | --txid=<txid>) + +Options: + --txid=<txid> : (str) txid of the transaction + Returns: (dict) JSON formatted transaction ``` @@ -1011,6 +1232,10 @@ List unspent transaction outputs Usage: utxo_list + +Options: + None + Returns: (list) List of unspent transaction outputs (UTXOs) [ @@ -1037,6 +1262,10 @@ Get lbry version information Usage: version + +Options: + None + Returns: (dict) Dictionary of lbry version information { @@ -1059,16 +1288,51 @@ Returns: Return the balance of the wallet Usage: - wallet_balance [<address> | --address=<address>] [-u] + wallet_balance [<address> | --address=<address>] [--include_unconfirmed] + Options: - <address> : If provided only the balance for this address will be given - -u : Include unconfirmed + --address=<address> : (str) If provided only the balance for this + address will be given + --include_unconfirmed : (bool) Include unconfirmed Returns: (float) amount of lbry credits in wallet ``` +## wallet_decrypt + +```text +Decrypt an encrypted wallet, this will remove the wallet password + +Usage: + wallet_decrypt + + +Options: + None + +Returns: + (bool) true if wallet is decrypted, otherwise false +``` + +## wallet_encrypt + +```text +Encrypt a wallet with a password, if the wallet is already encrypted this will update +the password + +Usage: + wallet_encrypt (<new_password> | --new_password=<new_password>) + + +Options: + --new_password=<new_password> : (str) password string to be used for encrypting wallet + +Returns: + (bool) true if wallet is decrypted, otherwise false +``` + ## wallet_is_address_mine ```text @@ -1077,6 +1341,10 @@ Checks if an address is associated with the current wallet. Usage: wallet_is_address_mine (<address> | --address=<address>) + +Options: + --address=<address> : (str) address to check + Returns: (bool) true, if address is associated with current wallet ``` @@ -1089,6 +1357,10 @@ List wallet addresses Usage: wallet_list + +Options: + None + Returns: List of wallet addresses ``` @@ -1101,6 +1373,10 @@ Generate a new wallet address Usage: wallet_new_address + +Options: + None + Returns: (str) New wallet address in base58 ``` @@ -1115,6 +1391,12 @@ Usage: (<num_addresses> | --num_addresses=<num_addresses>) (<amount> | --amount=<amount>) + +Options: + --no_broadcast : (bool) whether to broadcast or not + --num_addresses=<num_addresses> : (int) num of addresses to create + --amount=<amount> : (float) initial amount in each address + Returns: (dict) the resulting transaction ``` @@ -1127,6 +1409,10 @@ Get public key from wallet address Usage: wallet_public_key (<address> | --address=<address>) + +Options: + --address=<address> : (str) address for which to get the public key + Returns: (list) list of public keys associated with address. Could contain more than one public key if multisig. 
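
The wallet address commands above compose naturally: confirm an address belongs to the wallet before requesting its public key. Below is a minimal sketch under the same assumptions as the earlier JSON-RPC example (local daemon, default `http://localhost:5279/lbryapi` endpoint, keyword parameters); the address value is a placeholder, not a real wallet address.

```python
# Illustrative sketch only; same endpoint assumptions as the earlier example.
# The address below is a placeholder, not a real wallet address.
import json
import urllib.request

API_URL = "http://localhost:5279/lbryapi"  # assumed default


def api_call(method, **params):
    payload = json.dumps({"method": method, "params": params}).encode()
    request = urllib.request.Request(
        API_URL, data=payload, headers={"Content-Type": "application/json"}
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read().decode())["result"]


address = "bPLACEHOLDERxxxxxxxxxxxxxxxxxxxxxx"  # hypothetical value
print(api_call("wallet_balance", include_unconfirmed=True))
if api_call("wallet_is_address_mine", address=address):
    # wallet_public_key returns a list, since a multisig address
    # can be associated with more than one public key.
    print(api_call("wallet_public_key", address=address))
```
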
@@ -1143,7 +1429,13 @@ Usage: wallet_send (<amount> | --amount=<amount>) ((<address> | --address=<address>) | (<claim_id> | --claim_id=<claim_id>)) -Return: + +Options: + --amount=<amount> : (float) amount of credit to send + --address=<address> : (str) address to send credits to + --claim_id=<claim_id> : (float) claim_id of the claim to send to tip to + +Returns: If sending to an address: (bool) true if payment successfully scheduled @@ -1156,6 +1448,22 @@ Return: } ``` +## wallet_unlock + +```text +Unlock an encrypted wallet + +Usage: + wallet_unlock (<password> | --password=<password>) + + +Options: + --password=<password> : (str) password for unlocking wallet + +Returns: + (bool) true if wallet is unlocked, otherwise false +``` + ## wallet_unused_address ```text @@ -1165,6 +1473,10 @@ a new address if there is none. Usage: wallet_unused_address + +Options: + None + Returns: (str) Unused wallet address in base58 ``` diff --git a/docs/index.md b/docs/index.md index c6ad8010d..a0f391ca9 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,14 +1,39 @@ # LBRY JSON-RPC API Documentation -## blob_announce_all +## blob_announce ```text -Announce all blobs to the DHT +Announce blobs to the DHT Args: - None + 'announce_all' (optional) : (bool) announce all the blobs possessed by user + 'blob_hash' (optional) : (str) announce a blob, specified by blob_hash + 'stream_hash' (optional) : (str) announce all blobs associated with + stream_hash + 'sd_hash' (optional) : (str) announce all blobs associated with + sd_hash and the sd_hash itself + Returns: - (str) Success/fail message + (bool) true if successful +``` + +## blob_availability + +```text +Get blob availability + +Args: + 'blob_hash' (optional) : (str) check availability for this blob hash + 'search_timeout' (optional) : (int) how long to search for peers for the blob + in the dht + 'blob_timeout' (optional) : (int) how long to try downloading from a peer + +Returns: + (dict) { + "is_available": <bool, true if blob is available from a peer from peer list> + "reachable_peers": ["<ip>:<port>"], + "unreachable_peers": ["<ip>:<port>"] + } ``` ## blob_delete @@ -17,7 +42,8 @@ Returns: Delete a blob Args: - 'blob_hash': (str) hash of blob to get + 'blob_hash' (optional) : (str) blob hash of the blob to delete + Returns: (str) Success/fail message ``` @@ -28,16 +54,18 @@ Returns: Download and return a blob Args: - 'blob_hash': (str) blob hash of blob to get - 'timeout'(optional): (int) timeout in number of seconds - 'encoding'(optional): (str) by default no attempt at decoding is made, - can be set to one of the following decoders: - 'json' - 'payment_rate_manager'(optional): if not given the default payment rate manager - will be used. supported alternative rate managers: - 'only-free' + 'blob_hash' : (str) blob hash of the blob to get + 'timeout' (optional) : (int) timeout in number of seconds + 'encoding' (optional) : (str) by default no attempt at decoding + is made, can be set to one of the + following decoders: + 'json' + 'payment_rate_manager' (optional) : (str) if not given the default payment rate + manager will be used. + supported alternative rate managers: + 'only-free' -Returns +Returns: (str) Success/Fail message or (dict) decoded data ``` @@ -47,13 +75,14 @@ Returns Returns blob hashes. 
If not given filters, returns all blobs known by the blob manager Args: - 'uri' (optional): (str) filter by blobs in stream for winning claim - 'stream_hash' (optional): (str) filter by blobs in given stream hash - 'sd_hash' (optional): (str) filter by blobs in given sd hash - 'needed' (optional): (bool) only return needed blobs - 'finished' (optional): (bool) only return finished blobs - 'page_size' (optional): (int) limit number of results returned - 'page' (optional): (int) filter to page x of [page_size] results + 'needed' (optional) : (bool) only return needed blobs + 'finished' (optional) : (bool) only return finished blobs + 'uri' (optional) : (str) filter blobs by stream in a uri + 'stream_hash' (optional) : (str) filter blobs by stream hash + 'sd_hash' (optional) : (str) filter blobs by sd hash + 'page_size' (optional) : (int) results page size + 'page' (optional) : (int) page of results to return + Returns: (list) List of blob hashes ``` @@ -64,7 +93,8 @@ Returns: Reflects all saved blobs Args: - None + None + Returns: (bool) true if successful ``` @@ -75,28 +105,58 @@ Returns: Get contents of a block Args: - 'blockhash': (str) hash of the block to look up + 'blockhash' : (str) hash of the block to look up + 'height' : (int) height of the block to look up + Returns: (dict) Requested block ``` -## channel_list_mine +## channel_export ```text -Get my channels +Export serialized channel signing information for a given certificate claim id + +Args: + 'claim_id' : (str) Claim ID to export information about Returns: - (list) ClaimDict + (str) Serialized certificate information +``` + +## channel_import + +```text +Import serialized channel signing information (to allow signing new claims to the channel) + +Args: + 'serialized_certificate_info' (optional) : (str) certificate info + +Returns: + (dict) Result dictionary +``` + +## channel_list + +```text +Get certificate claim infos for channels that can be published to + +Args: + None + +Returns: + (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim + is in the wallet. 
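
The `channel_export` and `channel_import` calls documented above can be combined into a simple backup-and-restore flow for a channel certificate. The sketch below is illustrative only, under the same assumptions as the earlier examples (local daemon, default `http://localhost:5279/lbryapi` endpoint, keyword parameters); the claim id shown is a hypothetical placeholder.

```python
# Illustrative sketch only: back up and restore a channel certificate with
# `channel_export` / `channel_import`. Endpoint and claim id are assumptions.
import json
import urllib.request

API_URL = "http://localhost:5279/lbryapi"  # assumed default


def api_call(method, **params):
    payload = json.dumps({"method": method, "params": params}).encode()
    request = urllib.request.Request(
        API_URL, data=payload, headers={"Content-Type": "application/json"}
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read().decode())["result"]


channel_claim_id = "0123456789abcdef0123456789abcdef01234567"  # hypothetical
backup = api_call("channel_export", claim_id=channel_claim_id)
# Keep `backup` somewhere safe; it can later be restored on another wallet:
print(api_call("channel_import", serialized_certificate_info=backup))
```
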
``` ## channel_new ```text -Generate a publisher key and create a new certificate claim +Generate a publisher key and create a new '@' prefixed certificate claim Args: - 'channel_name': (str) '@' prefixed name - 'amount': (float) amount to claim name + 'channel_name' : (str) name of the channel prefixed with '@' + 'amount' : (float) bid amount on the channel Returns: (dict) Dictionary containing result of the claim @@ -115,8 +175,11 @@ Returns: Abandon a name and reclaim credits from the claim Args: - 'claim_id': (str) claim_id of claim -Return: + 'claim_id' (optional) : (str) claim_id of the claim to abandon + 'txid' (optional) : (str) txid of the claim to abandon + 'nout' (optional) : (int) nout of the claim to abandon + +Returns: (dict) Dictionary containing result of the claim { txid : (str) txid of resulting transaction @@ -127,11 +190,12 @@ Return: ## claim_list ```text -Get claims for a name +List current claims and information about them for a given name Args: - 'name': (str) search for claims on this name -Returns + 'name' : (str) name of the claim to list info about + +Returns: (dict) State of claims assigned for the name { 'claims': (list) list of claims for the name @@ -144,6 +208,7 @@ Returns 'height': (int) height of block containing the claim 'txid': (str) txid of the claim 'nout': (int) nout of the claim + 'permanent_url': (str) permanent url of the claim, 'supports': (list) a list of supports attached to the claim 'value': (str) the value of the claim }, @@ -153,14 +218,65 @@ Returns } ``` +## claim_list_by_channel + +```text +Get paginated claims in a channel specified by a channel uri + +Args: + 'uri' : (str) uri of the channel + 'uris' (optional) : (list) uris of the channel + 'page' (optional) : (int) which page of results to return where page 1 is the first + page, defaults to no pages + 'page_size' (optional) : (int) number of results in a page, default of 10 + +Returns: + { + resolved channel uri: { + If there was an error: + 'error': (str) error message + + 'claims_in_channel': the total number of results for the channel, + + If a page of results was requested: + 'returned_page': page number returned, + 'claims_in_channel': [ + { + 'absolute_channel_position': (int) claim index number in sorted list of + claims which assert to be part of the + channel + 'address': (str) claim address, + 'amount': (float) claim amount, + 'effective_amount': (float) claim amount including supports, + 'claim_id': (str) claim id, + 'claim_sequence': (int) claim sequence number, + 'decoded_claim': (bool) whether or not the claim value was decoded, + 'height': (int) claim height, + 'depth': (int) claim depth, + 'has_signature': (bool) included if decoded_claim + 'name': (str) claim name, + 'supports: (list) list of supports [{'txid': (str) txid, + 'nout': (int) nout, + 'amount': (float) amount}], + 'txid': (str) claim txid, + 'nout': (str) claim nout, + 'signature_is_valid': (bool), included if has_signature, + 'value': ClaimDict if decoded, otherwise hex string + } + ], + } + } +``` + ## claim_list_mine ```text List my name claims Args: - None -Returns + None + +Returns: (list) List of name claims owned by user [ { @@ -175,6 +291,7 @@ Returns 'height': (int) height of the block containing the claim 'is_spent': (bool) true if claim is abandoned, false otherwise 'name': (str) name of the claim + 'permanent_url': (str) permanent url of the claim, 'txid': (str) txid of the cliam 'nout': (int) nout of the claim 'value': (str) value of the claim @@ -188,10 +305,11 @@ Returns Support a name 
claim Args: - 'name': (str) Name of claim - 'claim_id': (str) claim ID of claim to support - 'amount': (float) amount to support by -Return: + 'name' : (str) name of the claim to support + 'claim_id' : (str) claim_id of the claim to support + 'amount' : (float) amount of support + +Returns: (dict) Dictionary containing result of the claim { txid : (str) txid of resulting support claim @@ -200,19 +318,66 @@ Return: } ``` +## claim_renew + +```text +Renew claim(s) or support(s) + +Args: + 'outpoint' : (str) outpoint of the claim to renew + 'height' : (str) update claims expiring before or at this block height + +Returns: + (dict) Dictionary where key is the the original claim's outpoint and + value is the result of the renewal + { + outpoint:{ + + 'tx' : (str) hex encoded transaction + 'txid' : (str) txid of resulting claim + 'nout' : (int) nout of the resulting claim + 'fee' : (float) fee paid for the claim transaction + 'claim_id' : (str) claim ID of the resulting claim + }, + } +``` + +## claim_send_to_address + +```text +Send a name claim to an address + +Args: + 'claim_id' : (str) claim_id to send + 'address' : (str) address to send the claim to + 'amount' (optional) : (int) Amount of credits to claim name for, defaults to the current amount + on the claim + +Returns: + (dict) Dictionary containing result of the claim + { + 'tx' : (str) hex encoded transaction + 'txid' : (str) txid of resulting claim + 'nout' : (int) nout of the resulting claim + 'fee' : (float) fee paid for the claim transaction + 'claim_id' : (str) claim ID of the resulting claim + } +``` + ## claim_show ```text -Resolve claim info from a LBRY name +Resolve claim info from txid/nout or with claim ID Args: - 'name': (str) name to look up, do not include lbry:// prefix - 'txid'(optional): (str) if specified, look for claim with this txid - 'nout'(optional): (int) if specified, look for claim with this nout - 'claim_id'(optional): (str) if specified, look for claim with this claim_id + 'txid' (optional) : (str) look for claim with this txid, nout must + also be specified + 'nout' (optional) : (int) look for claim with this nout, txid must + also be specified + 'claim_id' (optional) : (str) look for claim with this claim id + Returns: - (dict) Dictionary containing claim info, (bool) false if claim is not - resolvable + (dict) Dictionary containing claim info as below, { 'txid': (str) txid of claim @@ -223,6 +388,28 @@ Returns: 'claim_id': (str) claim ID of claim 'supports': (list) list of supports associated with claim } + + if claim cannot be resolved, dictionary as below will be returned + + { + 'error': (str) reason for error + } +``` + +## cli_test_command + +```text +This command is only for testing the CLI argument parsing +Args: + 'a_arg' (optional) : a arg + 'b_arg' (optional) : b arg + 'pos_arg' : pos arg + 'pos_args' (optional) : pos args + 'pos_arg2' (optional) : pos arg 2 + 'pos_arg3' (optional) : pos arg 3 + +Returns: + pos args ``` ## commands @@ -230,6 +417,9 @@ Returns: ```text Return a list of available commands +Args: + None + Returns: (list) list of available commands ``` @@ -239,42 +429,35 @@ Returns: ```text Stop lbrynet-daemon +Args: + None + Returns: (string) Shutdown message ``` -## descriptor_get - -```text -Download and return a sd blob - -Args: - 'sd_hash': (str) hash of sd blob - 'timeout'(optional): (int) timeout in number of seconds - 'payment_rate_manager'(optional): (str) if not given the default payment rate manager - will be used. 
supported alternative rate managers: - only-free - -Returns - (str) Success/Fail message or (dict) decoded data -``` - ## file_delete ```text -Delete a lbry file +Delete a LBRY file Args: - 'name' (optional): (str) delete file by lbry name, - 'sd_hash' (optional): (str) delete file by sd hash, - 'file_name' (optional): (str) delete file by the name in the downloads folder, - 'stream_hash' (optional): (str) delete file by stream hash, - 'claim_id' (optional): (str) delete file by claim ID, - 'outpoint' (optional): (str) delete file by claim outpoint, - 'rowid': (optional): (int) delete file by rowid in the file manager - 'delete_target_file' (optional): (bool) delete file from downloads folder, - defaults to true if false only the blobs and - db entries will be deleted + 'delete_from_download_dir' (optional) : (bool) delete file from download directory, + instead of just deleting blobs + 'delete_all' (optional) : (bool) if there are multiple matching files, + allow the deletion of multiple files. + Otherwise do not delete anything. + 'sd_hash' (optional) : (str) delete by file sd hash + 'file_name' (optional) : (str) delete by file name in downloads folder + 'stream_hash' (optional) : (str) delete by file stream hash + 'rowid' (optional) : (int) delete by file row id + 'claim_id' (optional) : (str) delete by file claim id + 'txid' (optional) : (str) delete by file claim txid + 'nout' (optional) : (int) delete by file claim nout + 'claim_name' (optional) : (str) delete by file claim name + 'channel_claim_id' (optional) : (str) delete by file channel claim id + 'channel_name' (optional) : (str) delete by file channel claim name + Returns: (bool) true if deletion was successful ``` @@ -285,14 +468,20 @@ Returns: List files limited by optional filters Args: - 'name' (optional): (str) filter files by lbry name, - 'sd_hash' (optional): (str) filter files by sd hash, - 'file_name' (optional): (str) filter files by the name in the downloads folder, - 'stream_hash' (optional): (str) filter files by stream hash, - 'claim_id' (optional): (str) filter files by claim id, - 'outpoint' (optional): (str) filter files by claim outpoint, - 'rowid' (optional): (int) filter files by internal row id, - 'full_status': (optional): (bool) if true populate the 'message' and 'size' fields + 'sd_hash' (optional) : (str) get file with matching sd hash + 'file_name' (optional) : (str) get file with matching file name in the + downloads folder + 'stream_hash' (optional) : (str) get file with matching stream hash + 'rowid' (optional) : (int) get file with matching row id + 'claim_id' (optional) : (str) get file with matching claim id + 'outpoint' (optional) : (str) get file with matching claim outpoint + 'txid' (optional) : (str) get file with matching claim txid + 'nout' (optional) : (int) get file with matching claim nout + 'channel_claim_id' (optional) : (str) get file with matching channel claim id + 'channel_name' (optional) : (str) get file with matching channel name + 'claim_name' (optional) : (str) get file with matching claim name + 'full_status' (optional) : (bool) full status, populate the + 'message' and 'size' fields Returns: (list) List of files @@ -308,30 +497,57 @@ Returns: 'stream_name': (str) stream name , 'suggested_file_name': (str) suggested file name, 'sd_hash': (str) sd hash of file, - 'name': (str) name claim attached to file - 'outpoint': (str) claim outpoint attached to file - 'claim_id': (str) claim ID attached to file, 'download_path': (str) download path of file, 'mime_type': (str) mime type 
of file, 'key': (str) key attached to file, - 'total_bytes': (int) file size in bytes, None if full_status is false - 'written_bytes': (int) written size in bytes - 'message': (str), None if full_status is false - 'metadata': (dict) Metadata dictionary + 'total_bytes': (int) file size in bytes, None if full_status is false, + 'written_bytes': (int) written size in bytes, + 'blobs_completed': (int) num_completed, None if full_status is false, + 'blobs_in_stream': (int) None if full_status is false, + 'status': (str) downloader status, None if full_status is false, + 'claim_id': (str) None if full_status is false or if claim is not found, + 'outpoint': (str) None if full_status is false or if claim is not found, + 'txid': (str) None if full_status is false or if claim is not found, + 'nout': (int) None if full_status is false or if claim is not found, + 'metadata': (dict) None if full_status is false or if claim is not found, + 'channel_claim_id': (str) None if full_status is false or if claim is not found or signed, + 'channel_name': (str) None if full_status is false or if claim is not found or signed, + 'claim_name': (str) None if full_status is false or if claim is not found }, ] ``` +## file_reflect + +```text +Reflect all the blobs in a file matching the filter criteria + +Args: + 'sd_hash' (optional) : (str) get file with matching sd hash + 'file_name' (optional) : (str) get file with matching file name in the + downloads folder + 'stream_hash' (optional) : (str) get file with matching stream hash + 'rowid' (optional) : (int) get file with matching row id + 'reflector' (optional) : (str) reflector server, ip address or url + by default choose a server from the config + +Returns: + (list) list of blobs reflected +``` + ## file_set_status ```text Start or stop downloading a file Args: - 'status': (str) "start" or "stop" - 'name' (optional): (str) start file by lbry name, - 'sd_hash' (optional): (str) start file by the hash in the name claim, - 'file_name' (optional): (str) start file by its name in the downloads folder, + 'status' : (str) one of "start" or "stop" + 'sd_hash' (optional) : (str) set status of file with matching sd hash + 'file_name' (optional) : (str) set status of file with matching file name in the + downloads folder + 'stream_hash' (optional) : (str) set status of file with matching stream hash + 'rowid' (optional) : (int) set status of file with matching row id + Returns: (str) Confirmation message ``` @@ -342,10 +558,10 @@ Returns: Download stream from a LBRY name. 
Args: - 'uri': (str) lbry uri to download - 'file_name'(optional): (str) a user specified name for the downloaded file - 'timeout'(optional): (int) download timeout in number of seconds - 'download_directory'(optional): (str) path to directory where file will be saved + 'uri' (optional) : (str) uri of the content to download + 'file_name' (optional) : (str) specified name for the downloaded file + 'timeout' (optional) : (int) download timeout in number of seconds + Returns: (dict) Dictionary containing information about the stream { @@ -355,46 +571,38 @@ Returns: 'points_paid': (float) credit paid to download file, 'stopped': (bool) true if download is stopped, 'stream_hash': (str) stream hash of file, - 'stream_name': (str) stream name, + 'stream_name': (str) stream name , 'suggested_file_name': (str) suggested file name, 'sd_hash': (str) sd hash of file, - 'name': (str) name claim attached to file - 'outpoint': (str) claim outpoint attached to file - 'claim_id': (str) claim ID attached to file, 'download_path': (str) download path of file, 'mime_type': (str) mime type of file, 'key': (str) key attached to file, - 'total_bytes': (int) file size in bytes, None if full_status is false - 'written_bytes': (int) written size in bytes - 'message': (str), None if full_status is false - 'metadata': (dict) Metadata dictionary + 'total_bytes': (int) file size in bytes, None if full_status is false, + 'written_bytes': (int) written size in bytes, + 'blobs_completed': (int) num_completed, None if full_status is false, + 'blobs_in_stream': (int) None if full_status is false, + 'status': (str) downloader status, None if full_status is false, + 'claim_id': (str) claim id, + 'outpoint': (str) claim outpoint string, + 'txid': (str) claim txid, + 'nout': (int) claim nout, + 'metadata': (dict) claim metadata, + 'channel_claim_id': (str) None if claim is not signed + 'channel_name': (str) None if claim is not signed + 'claim_name': (str) claim name } ``` -## get_availability - -```text -Get stream availability for lbry uri - -Args: - 'uri' : (str) lbry uri - 'sd_timeout' (optional): (int) sd blob download timeout - 'peer_timeout' (optional): (int) how long to look for peers - -Returns: - (float) Peers per blob / total blobs -``` - ## help ```text Return a useful message for an API command Args: - 'command'(optional): (str) command to retrieve documentation for + 'command' (optional) : (str) command to retrieve documentation for + Returns: - (str) if given a command, returns documentation about that command - otherwise returns general help message + (str) Help message ``` ## peer_list @@ -403,8 +611,9 @@ Returns: Get peers for blob hash Args: - 'blob_hash': (str) blob hash - 'timeout'(optional): (int) peer search timeout in seconds + 'blob_hash' : (str) find available peers for this blob hash + 'timeout' (optional) : (int) peer search timeout in seconds + Returns: (list) List of contacts ``` @@ -420,7 +629,7 @@ Fields required in the final Metadata are: 'description' 'author' 'language' - 'license', + 'license' 'nsfw' Metadata can be set by either using the metadata argument or by setting individual arguments @@ -428,29 +637,40 @@ fee, title, description, author, language, license, license_url, thumbnail, prev or sources. Individual arguments will overwrite the fields specified in metadata argument. Args: - 'name': (str) name to be claimed - 'bid': (float) amount of credits to commit in this claim, - 'metadata'(optional): (dict) Metadata to associate with the claim. 
- 'file_path'(optional): (str) path to file to be associated with name. If provided, - a lbry stream of this file will be used in 'sources'. - If no path is given but a metadata dict is provided, the source - from the given metadata will be used. - 'fee'(optional): (dict) Dictionary representing key fee to download content: - {currency_symbol: {'amount': float, 'address': str, optional}} - supported currencies: LBC, USD, BTC - If an address is not provided a new one will be automatically - generated. Default fee is zero. - 'title'(optional): (str) title of the file - 'description'(optional): (str) description of the file - 'author'(optional): (str) author of the file - 'language'(optional): (str), language code - 'license'(optional): (str) license for the file - 'license_url'(optional): (str) URL to license - 'thumbnail'(optional): (str) thumbnail URL for the file - 'preview'(optional): (str) preview URL for the file - 'nsfw'(optional): (bool) True if not safe for work - 'sources'(optional): (dict){'lbry_sd_hash':sd_hash} specifies sd hash of file - 'channel_name' (optional): (str) name of the publisher channel + 'name' : (str) name of the content + 'bid' : (float) amount to back the claim + 'metadata' (optional) : (dict) ClaimDict to associate with the claim. + 'file_path' (optional) : (str) path to file to be associated with name. If provided, + a lbry stream of this file will be used in 'sources'. + If no path is given but a sources dict is provided, + it will be used. If neither are provided, an + error is raised. + 'fee' (optional) : (dict) Dictionary representing key fee to download content: + { + 'currency': currency_symbol, + 'amount': float, + 'address': str, optional + } + supported currencies: LBC, USD, BTC + If an address is not provided a new one will be + automatically generated. Default fee is zero. + 'title' (optional) : (str) title of the publication + 'description' (optional) : (str) description of the publication + 'author' (optional) : (str) author of the publication + 'language' (optional) : (str) language of the publication + 'license' (optional) : (str) publication license + 'license_url' (optional) : (str) publication license url + 'thumbnail' (optional) : (str) thumbnail url + 'preview' (optional) : (str) preview url + 'nsfw' (optional) : (bool) title of the publication + 'sources' (optional) : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file + 'channel_name' (optional) : (str) name of the publisher channel name in the wallet + 'channel_id' (optional) : (str) claim id of the publisher channel, does not check + for channel claim being in the wallet. This allows + publishing to a channel where only the certificate + private key is in the wallet. 
+ 'claim_address' (optional) : (str) address where the claim is sent to, if not specified + new address wil automatically be created Returns: (dict) Dictionary containing result of the claim @@ -463,24 +683,14 @@ Returns: } ``` -## reflect - -```text -Reflect a stream - -Args: - 'sd_hash': (str) sd_hash of lbry file -Returns: - (bool) true if successful -``` - ## report_bug ```text Report a bug to slack Args: - 'message': (str) message to send + 'message' : (str) Description of the bug + Returns: (bool) true if successful ``` @@ -488,35 +698,21 @@ Returns: ## resolve ```text -Resolve a LBRY URI +Resolve given LBRY URIs Args: - 'uri': (str) uri to download + 'force' (optional) : (bool) force refresh and ignore cache + 'uri' : (str) uri to resolve + 'uris' (optional) : (list) uris to resolve + Returns: - None if nothing can be resolved, otherwise: - If uri resolves to a channel or a claim in a channel: - 'certificate': { - 'address': (str) claim address, - 'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number, - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'supports: (list) list of supports [{'txid': txid, - 'nout': nout, - 'amount': amount}], - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - If uri resolves to a channel: - 'claims_in_channel': [ - { + Dictionary of results, keyed by uri + '<uri>': { + If a resolution error occurs: + 'error': Error message + + If the uri resolves to a channel or a claim in a channel: + 'certificate': { 'address': (str) claim address, 'amount': (float) claim amount, 'effective_amount': (float) claim amount including supports, @@ -527,36 +723,41 @@ Returns: 'depth': (int) claim depth, 'has_signature': (bool) included if decoded_claim 'name': (str) claim name, - 'supports: (list) list of supports [{'txid': txid, - 'nout': nout, - 'amount': amount}], + 'permanent_url': (str) permanent url of the certificate claim, + 'supports: (list) list of supports [{'txid': (str) txid, + 'nout': (int) nout, + 'amount': (float) amount}], + 'txid': (str) claim txid, + 'nout': (str) claim nout, + 'signature_is_valid': (bool), included if has_signature, + 'value': ClaimDict if decoded, otherwise hex string + } + + If the uri resolves to a channel: + 'claims_in_channel': (int) number of claims in the channel, + + If the uri resolves to a claim: + 'claim': { + 'address': (str) claim address, + 'amount': (float) claim amount, + 'effective_amount': (float) claim amount including supports, + 'claim_id': (str) claim id, + 'claim_sequence': (int) claim sequence number, + 'decoded_claim': (bool) whether or not the claim value was decoded, + 'height': (int) claim height, + 'depth': (int) claim depth, + 'has_signature': (bool) included if decoded_claim + 'name': (str) claim name, + 'permanent_url': (str) permanent url of the claim, + 'channel_name': (str) channel name if claim is in a channel + 'supports: (list) list of supports [{'txid': (str) txid, + 'nout': (int) nout, + 'amount': (float) amount}] 'txid': (str) claim txid, 'nout': (str) claim nout, 'signature_is_valid': (bool), included if has_signature, 'value': ClaimDict if decoded, otherwise hex string } - ] - If uri resolves to a 
claim: - 'claim': { - 'address': (str) claim address, - 'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number, - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'channel_name': (str) channel name if claim is in a channel - 'supports: (list) list of supports [{'txid': txid, - 'nout': nout, - 'amount': amount}] - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } } ``` @@ -566,22 +767,38 @@ Returns: Resolve stream info from a LBRY name Args: - 'name': (str) name to look up, do not include lbry:// prefix + 'name' : (str) the name to resolve + 'force' (optional) : (bool) force refresh and do not check cache + Returns: (dict) Metadata dictionary from name claim, None if the name is not resolvable ``` -## send_amount_to_address +## routing_table_get ```text -Send credits to an address +Get DHT routing information Args: - 'amount': (float) the amount to send - 'address': (str) the address of the recipient in base58 + None + Returns: - (bool) true if payment successfully scheduled + (dict) dictionary containing routing and contact information + { + "buckets": { + <bucket index>: [ + { + "address": (str) peer address, + "node_id": (str) peer node id, + "blobs": (list) blob hashes announced by peer + } + ] + }, + "contacts": (list) contact node ids, + "blob_hashes": (list) all of the blob hashes stored by peers in the list of buckets, + "node_id": (str) the local dht node id + } ``` ## settings_get @@ -589,6 +806,9 @@ Returns: ```text Get daemon settings +Args: + None + Returns: (dict) Dictionary of daemon settings See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings @@ -600,17 +820,31 @@ Returns: Set daemon settings Args: - 'run_on_startup': (bool) currently not supported - 'data_rate': (float) data rate, - 'max_key_fee': (float) maximum key fee, - 'disable_max_key_fee': (bool) true to disable max_key_fee check, - 'download_directory': (str) path of where files are downloaded, - 'peer_port': (int) port through which daemon should connect, - 'max_upload': (float), currently not supported - 'max_download': (float), currently not supported - 'download_timeout': (int) download timeout in seconds - 'search_timeout': (float) search timeout in seconds - 'cache_time': (int) cache timeout in seconds + 'download_directory' (optional) : (str) path of download directory + 'data_rate' (optional) : (float) 0.0001 + 'download_timeout' (optional) : (int) 180 + 'peer_port' (optional) : (int) 3333 + 'max_key_fee' (optional) : (dict) maximum key fee for downloads, + in the format: + { + 'currency': <currency_symbol>, + 'amount': <amount> + }. 
+ In the CLI, it must be an escaped JSON string + Supported currency symbols: LBC, USD, BTC + 'disable_max_key_fee' (optional) : (bool) False + 'use_upnp' (optional) : (bool) True + 'run_reflector_server' (optional) : (bool) False + 'cache_time' (optional) : (int) 150 + 'reflect_uploads' (optional) : (bool) True + 'share_usage_data' (optional) : (bool) True + 'peer_search_timeout' (optional) : (int) 3 + 'sd_download_timeout' (optional) : (int) 3 + 'auto_renew_claim_height_delta' (optional) : (int) 0 + claims set to expire within this many blocks will be + automatically renewed after startup (if set to 0, renews + will not be made automatically) + Returns: (dict) Updated dictionary of daemon settings ``` @@ -618,13 +852,82 @@ Returns: ## status ```text -Return daemon status +Get daemon status Args: - 'session_status' (optional): (bool) true to return session status, - default is false + 'session_status' (optional) : (bool) include session status in results + 'dht_status' (optional) : (bool) include dht network and peer status + Returns: - (dict) Daemon status dictionary + (dict) lbrynet-daemon status + { + 'lbry_id': lbry peer id, base58, + 'installation_id': installation id, base58, + 'is_running': bool, + 'is_first_run': bool, + 'startup_status': { + 'code': status code, + 'message': status message + }, + 'connection_status': { + 'code': connection status code, + 'message': connection status message + }, + 'blockchain_status': { + 'blocks': local blockchain height, + 'blocks_behind': remote_height - local_height, + 'best_blockhash': block hash of most recent block, + }, + 'wallet_is_encrypted': bool, + + If given the session status option: + 'session_status': { + 'managed_blobs': count of blobs in the blob manager, + 'managed_streams': count of streams in the file manager + 'announce_queue_size': number of blobs currently queued to be announced + 'should_announce_blobs': number of blobs that should be announced + } + + If given the dht status option: + 'dht_status': { + 'kbps_received': current kbps receiving, + 'kbps_sent': current kdps being sent, + 'total_bytes_sent': total bytes sent, + 'total_bytes_received': total bytes received, + 'queries_received': number of queries received per second, + 'queries_sent': number of queries sent per second, + 'recent_contacts': count of recently contacted peers, + 'unique_contacts': count of unique peers + }, + } +``` + +## stream_availability + +```text +Get stream availability for lbry uri + +Args: + 'uri' : (str) check availability for this uri + 'search_timeout' (optional) : (int) how long to search for peers for the blob + in the dht + 'search_timeout' (optional) : (int) how long to try downloading from a peer + +Returns: + (dict) { + 'is_available': <bool>, + 'did_decode': <bool>, + 'did_resolve': <bool>, + 'is_stream': <bool>, + 'num_blobs_in_stream': <int>, + 'sd_hash': <str>, + 'sd_blob_availability': <dict> see `blob_availability`, + 'head_blob_hash': <str>, + 'head_blob_availability': <dict> see `blob_availability`, + 'use_upnp': <bool>, + 'upnp_redirect_is_set': <bool>, + 'error': <None> | <str> error message + } ``` ## stream_cost_estimate @@ -633,12 +936,13 @@ Returns: Get estimated cost for a lbry stream Args: - 'name': (str) lbry name - 'size' (optional): (int) stream size, in bytes. if provided an sd blob - won't be downloaded. + 'uri' : (str) uri to use + 'size' (optional) : (float) stream size in bytes. if provided an sd blob won't be + downloaded. 
+ Returns: (float) Estimated cost in lbry credits, returns None if uri is not - resolveable + resolvable ``` ## transaction_list @@ -647,9 +951,52 @@ Returns: List transactions belonging to wallet Args: - None + None + Returns: (list) List of transactions + + { + "claim_info": (list) claim info if in txn [{ + "address": (str) address of claim, + "balance_delta": (float) bid amount, + "amount": (float) claim amount, + "claim_id": (str) claim id, + "claim_name": (str) claim name, + "nout": (int) nout + }], + "abandon_info": (list) abandon info if in txn [{ + "address": (str) address of abandoned claim, + "balance_delta": (float) returned amount, + "amount": (float) claim amount, + "claim_id": (str) claim id, + "claim_name": (str) claim name, + "nout": (int) nout + }], + "confirmations": (int) number of confirmations for the txn, + "date": (str) date and time of txn, + "fee": (float) txn fee, + "support_info": (list) support info if in txn [{ + "address": (str) address of support, + "balance_delta": (float) support amount, + "amount": (float) support amount, + "claim_id": (str) claim id, + "claim_name": (str) claim name, + "is_tip": (bool), + "nout": (int) nout + }], + "timestamp": (int) timestamp, + "txid": (str) txn id, + "update_info": (list) update info if in txn [{ + "address": (str) address of claim, + "balance_delta": (float) credited/debited + "amount": (float) absolute amount, + "claim_id": (str) claim id, + "claim_name": (str) claim name, + "nout": (int) nout + }], + "value": (float) value of txn + } ``` ## transaction_show @@ -658,18 +1005,46 @@ Returns: Get a decoded transaction from a txid Args: - 'txid': (str) txid of transaction + 'txid' : (str) txid of the transaction + Returns: (dict) JSON formatted transaction ``` +## utxo_list + +```text +List unspent transaction outputs + +Args: + None + +Returns: + (list) List of unspent transaction outputs (UTXOs) + [ + { + "address": (str) the output address + "amount": (float) unspent amount + "height": (int) block height + "is_claim": (bool) is the tx a claim + "is_coinbase": (bool) is the tx a coinbase tx + "is_support": (bool) is the tx a support + "is_update": (bool) is the tx an update + "nout": (int) nout of the output + "txid": (str) txid of the output + }, + ... + ] +``` + ## version ```text Get lbry version information Args: - None + None + Returns: (dict) Dictionary of lbry version information { @@ -692,21 +1067,47 @@ Returns: Return the balance of the wallet Args: - 'address' (optional): If address is provided only that balance will be given - 'include_unconfirmed' (optional): If set unconfirmed balance will be included in - the only takes effect when address is also provided. + 'address' (optional) : (str) If provided only the balance for this + address will be given + 'include_unconfirmed' (optional) : (bool) Include unconfirmed Returns: (float) amount of lbry credits in wallet ``` +## wallet_decrypt + +```text +Decrypt an encrypted wallet, this will remove the wallet password + +Args: + None + +Returns: + (bool) true if wallet is decrypted, otherwise false +``` + +## wallet_encrypt + +```text +Encrypt a wallet with a password, if the wallet is already encrypted this will update +the password + +Args: + 'new_password' : (str) password string to be used for encrypting wallet + +Returns: + (bool) true if wallet is decrypted, otherwise false +``` + ## wallet_is_address_mine ```text Checks if an address is associated with the current wallet. 
Args: - 'address': (str) address to check in base58 + 'address' : (str) address to check + Returns: (bool) true, if address is associated with current wallet ``` @@ -717,7 +1118,8 @@ Returns: List wallet addresses Args: - None + None + Returns: List of wallet addresses ``` @@ -728,23 +1130,76 @@ Returns: Generate a new wallet address Args: - None + None + Returns: (str) New wallet address in base58 ``` +## wallet_prefill_addresses + +```text +Create new addresses, each containing `amount` credits + +Args: + 'no_broadcast' (optional) : (bool) whether to broadcast or not + 'num_addresses' : (int) num of addresses to create + 'amount' : (float) initial amount in each address + +Returns: + (dict) the resulting transaction +``` + ## wallet_public_key ```text Get public key from wallet address Args: - 'address': (str) wallet address in base58 + 'address' : (str) address for which to get the public key + Returns: (list) list of public keys associated with address. Could contain more than one public key if multisig. ``` +## wallet_send + +```text +Send credits. If given an address, send credits to it. If given a claim id, send a tip +to the owner of a claim specified by uri. A tip is a claim support where the recipient +of the support is the claim address for the claim being supported. + +Args: + 'amount' : (float) amount of credit to send + 'address' : (str) address to send credits to + 'claim_id' : (float) claim_id of the claim to send to tip to + +Returns: + If sending to an address: + (bool) true if payment successfully scheduled + + If sending a claim tip: + (dict) Dictionary containing the result of the support + { + txid : (str) txid of resulting support claim + nout : (int) nout of the resulting support claim + fee : (float) fee paid for the transaction + } +``` + +## wallet_unlock + +```text +Unlock an encrypted wallet + +Args: + 'password' : (str) password for unlocking wallet + +Returns: + (bool) true if wallet is unlocked, otherwise false +``` + ## wallet_unused_address ```text @@ -752,7 +1207,8 @@ Return an address containing no balance, will create a new address if there is none. 
Args: - None + None + Returns: (str) Unused wallet address in base58 ``` diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index afad31566..419409af2 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -997,17 +997,16 @@ class Daemon(AuthJSONRPCServer): ############################################################################ @defer.inlineCallbacks - @AuthJSONRPCServer.flags(session_status="-s", dht_status="-d") def jsonrpc_status(self, session_status=False, dht_status=False): """ Get daemon status Usage: - status [-s] [-d] + status [--session_status] [--dht_status] Options: - -s : include session status in results - -d : include dht network and peer status + --session_status : (bool) include session status in results + --dht_status : (bool) include dht network and peer status Returns: (dict) lbrynet-daemon status @@ -1107,6 +1106,9 @@ class Daemon(AuthJSONRPCServer): Usage: version + Options: + None + Returns: (dict) Dictionary of lbry version information { @@ -1135,6 +1137,9 @@ class Daemon(AuthJSONRPCServer): Usage: report_bug (<message> | --message=<message>) + Options: + --message=<message> : (str) Description of the bug + Returns: (bool) true if successful """ @@ -1155,6 +1160,9 @@ class Daemon(AuthJSONRPCServer): Usage: settings_get + Options: + None + Returns: (dict) Dictionary of daemon settings See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings @@ -1184,29 +1192,27 @@ class Daemon(AuthJSONRPCServer): [--auto_renew_claim_height_delta=<auto_renew_claim_height_delta>] Options: - --download_directory=<download_directory> : (str) - --data_rate=<data_rate> : (float), 0.0001 - --download_timeout=<download_timeout> : (int), 180 - --peer_port=<peer_port> : (int), 3333 - --max_key_fee=<max_key_fee> : (dict) maximum key fee for downloads, - in the format: { - "currency": <currency_symbol>, - "amount": <amount> - }. In the CLI, it must be an escaped - JSON string - Supported currency symbols: - LBC - BTC - USD - --disable_max_key_fee=<disable_max_key_fee> : (bool), False - --use_upnp=<use_upnp> : (bool), True - --run_reflector_server=<run_reflector_server> : (bool), False - --cache_time=<cache_time> : (int), 150 - --reflect_uploads=<reflect_uploads> : (bool), True - --share_usage_data=<share_usage_data> : (bool), True - --peer_search_timeout=<peer_search_timeout> : (int), 3 - --sd_download_timeout=<sd_download_timeout> : (int), 3 - --auto_renew_claim_height_delta=<auto_renew_claim_height_delta> : (int), 0 + --download_directory=<download_directory> : (str) path of download directory + --data_rate=<data_rate> : (float) 0.0001 + --download_timeout=<download_timeout> : (int) 180 + --peer_port=<peer_port> : (int) 3333 + --max_key_fee=<max_key_fee> : (dict) maximum key fee for downloads, + in the format: + { + 'currency': <currency_symbol>, + 'amount': <amount> + }. 
+ In the CLI, it must be an escaped JSON string + Supported currency symbols: LBC, USD, BTC + --disable_max_key_fee=<disable_max_key_fee> : (bool) False + --use_upnp=<use_upnp> : (bool) True + --run_reflector_server=<run_reflector_server> : (bool) False + --cache_time=<cache_time> : (int) 150 + --reflect_uploads=<reflect_uploads> : (bool) True + --share_usage_data=<share_usage_data> : (bool) True + --peer_search_timeout=<peer_search_timeout> : (int) 3 + --sd_download_timeout=<sd_download_timeout> : (int) 3 + --auto_renew_claim_height_delta=<auto_renew_claim_height_delta> : (int) 0 claims set to expire within this many blocks will be automatically renewed after startup (if set to 0, renews will not be made automatically) @@ -1227,7 +1233,10 @@ class Daemon(AuthJSONRPCServer): help [<command> | --command=<command>] Options: - <command>, --command=<command> : command to retrieve documentation for + --command=<command> : (str) command to retrieve documentation for + + Returns: + (str) Help message """ if command is None: @@ -1256,22 +1265,25 @@ class Daemon(AuthJSONRPCServer): Usage: commands + Options: + None + Returns: (list) list of available commands """ return self._render_response(sorted([command for command in self.callable_methods.keys()])) - @AuthJSONRPCServer.flags(include_unconfirmed='-u') def jsonrpc_wallet_balance(self, address=None, include_unconfirmed=False): """ Return the balance of the wallet Usage: - wallet_balance [<address> | --address=<address>] [-u] + wallet_balance [<address> | --address=<address>] [--include_unconfirmed] Options: - <address> : If provided only the balance for this address will be given - -u : Include unconfirmed + --address=<address> : (str) If provided only the balance for this + address will be given + --include_unconfirmed : (bool) Include unconfirmed Returns: (float) amount of lbry credits in wallet @@ -1288,7 +1300,10 @@ class Daemon(AuthJSONRPCServer): Unlock an encrypted wallet Usage: - wallet_unlock (<password>) + wallet_unlock (<password> | --password=<password>) + + Options: + --password=<password> : (str) password for unlocking wallet Returns: (bool) true if wallet is unlocked, otherwise false @@ -1312,6 +1327,9 @@ class Daemon(AuthJSONRPCServer): Usage: wallet_decrypt + Options: + None + Returns: (bool) true if wallet is decrypted, otherwise false """ @@ -1327,7 +1345,10 @@ class Daemon(AuthJSONRPCServer): the password Usage: - wallet_encrypt (<new_password>) + wallet_encrypt (<new_password> | --new_password=<new_password>) + + Options: + --new_password=<new_password> : (str) password string to be used for encrypting wallet Returns: (bool) true if wallet is decrypted, otherwise false @@ -1345,6 +1366,9 @@ class Daemon(AuthJSONRPCServer): Usage: daemon_stop + Options: + None + Returns: (string) Shutdown message """ @@ -1355,7 +1379,6 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(response) @defer.inlineCallbacks - @AuthJSONRPCServer.flags(full_status='-f') def jsonrpc_file_list(self, **kwargs): """ List files limited by optional filters @@ -1364,22 +1387,23 @@ class Daemon(AuthJSONRPCServer): file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>] [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>] - [--claim_name=<claim_name>] [-f] + [--claim_name=<claim_name>] [--full_status] Options: - --sd_hash=<sd_hash> : get file with matching sd hash - --file_name=<file_name> : get file with 
matching file name in the + --sd_hash=<sd_hash> : (str) get file with matching sd hash + --file_name=<file_name> : (str) get file with matching file name in the downloads folder - --stream_hash=<stream_hash> : get file with matching stream hash - --rowid=<rowid> : get file with matching row id - --claim_id=<claim_id> : get file with matching claim id - --outpoint=<outpoint> : get file with matching claim outpoint - --txid=<txid> : get file with matching claim txid - --nout=<nout> : get file with matching claim nout - --channel_claim_id=<channel_claim_id> : get file with matching channel claim id - --channel_name=<channel_name> : get file with matching channel name - --claim_name=<claim_name> : get file with matching claim name - -f : full status, populate the 'message' and 'size' fields + --stream_hash=<stream_hash> : (str) get file with matching stream hash + --rowid=<rowid> : (int) get file with matching row id + --claim_id=<claim_id> : (str) get file with matching claim id + --outpoint=<outpoint> : (str) get file with matching claim outpoint + --txid=<txid> : (str) get file with matching claim txid + --nout=<nout> : (int) get file with matching claim nout + --channel_claim_id=<channel_claim_id> : (str) get file with matching channel claim id + --channel_name=<channel_name> : (str) get file with matching channel name + --claim_name=<claim_name> : (str) get file with matching claim name + --full_status : (bool) full status, populate the + 'message' and 'size' fields Returns: (list) List of files @@ -1420,16 +1444,16 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(response) @defer.inlineCallbacks - @AuthJSONRPCServer.flags(force='-f') def jsonrpc_resolve_name(self, name, force=False): """ Resolve stream info from a LBRY name Usage: - resolve_name <name> [-f] + resolve_name (<name> | --name=<name>) [--force] Options: - -f : force refresh and do not check cache + --name=<name> : (str) the name to resolve + --force : (bool) force refresh and do not check cache Returns: (dict) Metadata dictionary from name claim, None if the name is not @@ -1454,11 +1478,11 @@ class Daemon(AuthJSONRPCServer): [<claim_id> | --claim_id=<claim_id>] Options: - <txid>, --txid=<txid> : look for claim with this txid, nout must - also be specified - <nout>, --nout=<nout> : look for claim with this nout, txid must - also be specified - <claim_id>, --claim_id=<claim_id> : look for claim with this claim id + --txid=<txid> : (str) look for claim with this txid, nout must + also be specified + --nout=<nout> : (int) look for claim with this nout, txid must + also be specified + --claim_id=<claim_id> : (str) look for claim with this claim id Returns: (dict) Dictionary containing claim info as below, @@ -1492,16 +1516,17 @@ class Daemon(AuthJSONRPCServer): @AuthJSONRPCServer.auth_required @defer.inlineCallbacks - @AuthJSONRPCServer.flags(force='-f') def jsonrpc_resolve(self, force=False, uri=None, uris=[]): """ Resolve given LBRY URIs Usage: - resolve [-f] (<uri> | --uri=<uri>) [<uris>...] + resolve [--force] (<uri> | --uri=<uri>) [<uris>...] 
Options: - -f : force refresh and ignore cache + --force : (bool) force refresh and ignore cache + --uri=<uri> : (str) uri to resolve + --uris=<uris> : (list) uris to resolve Returns: Dictionary of results, keyed by uri @@ -1591,8 +1616,9 @@ class Daemon(AuthJSONRPCServer): Options: - <file_name> : specified name for the downloaded file - <timeout> : download timeout in number of seconds + --uri=<uri> : (str) uri of the content to download + --file_name=<file_name> : (str) specified name for the downloaded file + --timeout=<timeout> : (int) download timeout in number of seconds Returns: (dict) Dictionary containing information about the stream @@ -1675,15 +1701,16 @@ class Daemon(AuthJSONRPCServer): Start or stop downloading a file Usage: - file_set_status <status> [--sd_hash=<sd_hash>] [--file_name=<file_name>] - [--stream_hash=<stream_hash>] [--rowid=<rowid>] + file_set_status (<status> | --status=<status>) [--sd_hash=<sd_hash>] + [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>] Options: - --sd_hash=<sd_hash> : set status of file with matching sd hash - --file_name=<file_name> : set status of file with matching file name in the + --status=<status> : (str) one of "start" or "stop" + --sd_hash=<sd_hash> : (str) set status of file with matching sd hash + --file_name=<file_name> : (str) set status of file with matching file name in the downloads folder - --stream_hash=<stream_hash> : set status of file with matching stream hash - --rowid=<rowid> : set status of file with matching row id + --stream_hash=<stream_hash> : (str) set status of file with matching stream hash + --rowid=<rowid> : (int) set status of file with matching row id Returns: (str) Confirmation message @@ -1710,33 +1737,32 @@ class Daemon(AuthJSONRPCServer): @AuthJSONRPCServer.auth_required @defer.inlineCallbacks - @AuthJSONRPCServer.flags(delete_from_download_dir='-f', delete_all='--delete_all') def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs): """ Delete a LBRY file Usage: - file_delete [-f] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>] + file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>] [--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>] Options: - -f, --delete_from_download_dir : delete file from download directory, + --delete_from_download_dir : (bool) delete file from download directory, instead of just deleting blobs - --delete_all : if there are multiple matching files, + --delete_all : (bool) if there are multiple matching files, allow the deletion of multiple files. Otherwise do not delete anything. 
- --sd_hash=<sd_hash> : delete by file sd hash - --file_name<file_name> : delete by file name in downloads folder - --stream_hash=<stream_hash> : delete by file stream hash - --rowid=<rowid> : delete by file row id - --claim_id=<claim_id> : delete by file claim id - --txid=<txid> : delete by file claim txid - --nout=<nout> : delete by file claim nout - --claim_name=<claim_name> : delete by file claim name - --channel_claim_id=<channel_claim_id> : delete by file channel claim id - --channel_name=<channel_name> : delete by file channel claim name + --sd_hash=<sd_hash> : (str) delete by file sd hash + --file_name<file_name> : (str) delete by file name in downloads folder + --stream_hash=<stream_hash> : (str) delete by file stream hash + --rowid=<rowid> : (int) delete by file row id + --claim_id=<claim_id> : (str) delete by file claim id + --txid=<txid> : (str) delete by file claim txid + --nout=<nout> : (int) delete by file claim nout + --claim_name=<claim_name> : (str) delete by file claim name + --channel_claim_id=<channel_claim_id> : (str) delete by file channel claim id + --channel_name=<channel_name> : (str) delete by file channel claim name Returns: (bool) true if deletion was successful @@ -1776,10 +1802,11 @@ class Daemon(AuthJSONRPCServer): Get estimated cost for a lbry stream Usage: - stream_cost_estimate <uri> [<size> | --size=<size>] + stream_cost_estimate (<uri> | --uri=<uri>) [<size> | --size=<size>] Options: - <size>, --size=<size> : stream size in bytes. if provided an sd blob won't be + --uri=<uri> : (str) uri to use + --size=<size> : (float) stream size in bytes. if provided an sd blob won't be downloaded. Returns: @@ -1799,6 +1826,10 @@ class Daemon(AuthJSONRPCServer): channel_new (<channel_name> | --channel_name=<channel_name>) (<amount> | --amount=<amount>) + Options: + --channel_name=<channel_name> : (str) name of the channel prefixed with '@' + --amount=<amount> : (float) bid amount on the channel + Returns: (dict) Dictionary containing result of the claim { @@ -1838,6 +1869,9 @@ class Daemon(AuthJSONRPCServer): Usage: channel_list + Options: + None + Returns: (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim is in the wallet. @@ -1856,6 +1890,9 @@ class Daemon(AuthJSONRPCServer): Usage: channel_list_mine + Options: + None + Returns: (list) ClaimDict """ @@ -1871,6 +1908,9 @@ class Daemon(AuthJSONRPCServer): Usage: channel_export (<claim_id> | --claim_id=<claim_id>) + Options: + --claim_id=<claim_id> : (str) Claim ID to export information about + Returns: (str) Serialized certificate information """ @@ -1888,6 +1928,9 @@ class Daemon(AuthJSONRPCServer): channel_import (<serialized_certificate_info> | --serialized_certificate_info=<serialized_certificate_info>) + Options: + --serialized_certificate_info=<serialized_certificate_info> : (str) certificate info + Returns: (dict) Result dictionary """ @@ -1928,13 +1971,15 @@ class Daemon(AuthJSONRPCServer): [--claim_address=<claim_address>] [--change_address=<change_address>] Options: - --metadata=<metadata> : ClaimDict to associate with the claim. - --file_path=<file_path> : path to file to be associated with name. If provided, + --name=<name> : (str) name of the content + --bid=<bid> : (float) amount to back the claim + --metadata=<metadata> : (dict) ClaimDict to associate with the claim. + --file_path=<file_path> : (str) path to file to be associated with name. If provided, a lbry stream of this file will be used in 'sources'. 
If no path is given but a sources dict is provided, it will be used. If neither are provided, an error is raised. - --fee=<fee> : Dictionary representing key fee to download content: + --fee=<fee> : (dict) Dictionary representing key fee to download content: { 'currency': currency_symbol, 'amount': float, @@ -1943,22 +1988,22 @@ class Daemon(AuthJSONRPCServer): supported currencies: LBC, USD, BTC If an address is not provided a new one will be automatically generated. Default fee is zero. - --title=<title> : title of the publication - --description=<description> : description of the publication - --author=<author> : author of the publication - --language=<language> : language of the publication - --license=<license> : publication license - --license_url=<license_url> : publication license url - --thumbnail=<thumbnail> : thumbnail url - --preview=<preview> : preview url - --nsfw=<nsfw> : title of the publication - --sources=<sources> : {'lbry_sd_hash':sd_hash} specifies sd hash of file - --channel_name=<channel_name> : name of the publisher channel name in the wallet - --channel_id=<channel_id> : claim id of the publisher channel, does not check + --title=<title> : (str) title of the publication + --description=<description> : (str) description of the publication + --author=<author> : (str) author of the publication + --language=<language> : (str) language of the publication + --license=<license> : (str) publication license + --license_url=<license_url> : (str) publication license url + --thumbnail=<thumbnail> : (str) thumbnail url + --preview=<preview> : (str) preview url + --nsfw=<nsfw> : (bool) title of the publication + --sources=<sources> : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file + --channel_name=<channel_name> : (str) name of the publisher channel name in the wallet + --channel_id=<channel_id> : (str) claim id of the publisher channel, does not check for channel claim being in the wallet. This allows publishing to a channel where only the certificate private key is in the wallet. 
- --claim_address=<claim_address> : address where the claim is sent to, if not specified + --claim_address=<claim_address> : (str) address where the claim is sent to, if not specified new address wil automatically be created Returns: @@ -2106,7 +2151,12 @@ class Daemon(AuthJSONRPCServer): claim_abandon [<claim_id> | --claim_id=<claim_id>] [<txid> | --txid=<txid>] [<nout> | --nout=<nout>] - Return: + Options: + --claim_id=<claim_id> : (str) claim_id of the claim to abandon + --txid=<txid> : (str) txid of the claim to abandon + --nout=<nout> : (int) nout of the claim to abandon + + Returns: (dict) Dictionary containing result of the claim { txid : (str) txid of resulting transaction @@ -2134,7 +2184,12 @@ class Daemon(AuthJSONRPCServer): claim_new_support (<name> | --name=<name>) (<claim_id> | --claim_id=<claim_id>) (<amount> | --amount=<amount>) - Return: + Options: + --name=<name> : (str) name of the claim to support + --claim_id=<claim_id> : (str) claim_id of the claim to support + --amount=<amount> : (float) amount of support + + Returns: (dict) Dictionary containing result of the claim { txid : (str) txid of resulting support claim @@ -2156,7 +2211,11 @@ class Daemon(AuthJSONRPCServer): Usage: claim_renew (<outpoint> | --outpoint=<outpoint>) | (<height> | --height=<height>) - Return: + Options: + --outpoint=<outpoint> : (str) outpoint of the claim to renew + --height=<height> : (str) update claims expiring before or at this block height + + Returns: (dict) Dictionary where key is the the original claim's outpoint and value is the result of the renewal { @@ -2198,8 +2257,21 @@ class Daemon(AuthJSONRPCServer): [<amount> | --amount=<amount>] Options: - <amount> : Amount of credits to claim name for, defaults to the current amount - on the claim + --claim_id=<claim_id> : (str) claim_id to send + --address=<address> : (str) address to send the claim to + --amount<amount> : (int) Amount of credits to claim name for, defaults to the current amount + on the claim + + Returns: + (dict) Dictionary containing result of the claim + { + 'tx' : (str) hex encoded transaction + 'txid' : (str) txid of resulting claim + 'nout' : (int) nout of the resulting claim + 'fee' : (float) fee paid for the claim transaction + 'claim_id' : (str) claim ID of the resulting claim + } + """ result = yield self.session.wallet.send_claim_to_address(claim_id, address, amount) response = yield self._render_response(result) @@ -2214,7 +2286,10 @@ class Daemon(AuthJSONRPCServer): Usage: claim_list_mine - Returns + Options: + None + + Returns: (list) List of name claims owned by user [ { @@ -2249,7 +2324,10 @@ class Daemon(AuthJSONRPCServer): Usage: claim_list (<name> | --name=<name>) - Returns + Options: + --name=<name> : (str) name of the claim to list info about + + Returns: (dict) State of claims assigned for the name { 'claims': (list) list of claims for the name @@ -2286,9 +2364,11 @@ class Daemon(AuthJSONRPCServer): [--page_size=<page_size>] Options: - --page=<page> : which page of results to return where page 1 is the first - page, defaults to no pages - --page_size=<page_size> : number of results in a page, default of 10 + --uri=<uri> : (str) uri of the channel + --uris=<uris> : (list) uris of the channel + --page=<page> : (int) which page of results to return where page 1 is the first + page, defaults to no pages + --page_size=<page_size> : (int) number of results in a page, default of 10 Returns: { @@ -2371,6 +2451,9 @@ class Daemon(AuthJSONRPCServer): Usage: transaction_list + Options: + None + Returns: (list) 
List of transactions @@ -2429,6 +2512,9 @@ class Daemon(AuthJSONRPCServer): Usage: transaction_show (<txid> | --txid=<txid>) + Options: + --txid=<txid> : (str) txid of the transaction + Returns: (dict) JSON formatted transaction """ @@ -2445,6 +2531,9 @@ class Daemon(AuthJSONRPCServer): Usage: wallet_is_address_mine (<address> | --address=<address>) + Options: + --address=<address> : (str) address to check + Returns: (bool) true, if address is associated with current wallet """ @@ -2461,6 +2550,9 @@ class Daemon(AuthJSONRPCServer): Usage: wallet_public_key (<address> | --address=<address>) + Options: + --address=<address> : (str) address for which to get the public key + Returns: (list) list of public keys associated with address. Could contain more than one public key if multisig. @@ -2479,6 +2571,9 @@ class Daemon(AuthJSONRPCServer): Usage: wallet_list + Options: + None + Returns: List of wallet addresses """ @@ -2495,6 +2590,9 @@ class Daemon(AuthJSONRPCServer): Usage: wallet_new_address + Options: + None + Returns: (str) New wallet address in base58 """ @@ -2517,6 +2615,9 @@ class Daemon(AuthJSONRPCServer): Usage: wallet_unused_address + Options: + None + Returns: (str) Unused wallet address in base58 """ @@ -2540,6 +2641,10 @@ class Daemon(AuthJSONRPCServer): Usage: send_amount_to_address (<amount> | --amount=<amount>) (<address> | --address=<address>) + Options: + --amount=<amount> : (float) amount to send + --address=<address> : (str) address to send credits to + Returns: (bool) true if payment successfully scheduled """ @@ -2568,7 +2673,12 @@ class Daemon(AuthJSONRPCServer): wallet_send (<amount> | --amount=<amount>) ((<address> | --address=<address>) | (<claim_id> | --claim_id=<claim_id>)) - Return: + Options: + --amount=<amount> : (float) amount of credit to send + --address=<address> : (str) address to send credits to + --claim_id=<claim_id> : (float) claim_id of the claim to send to tip to + + Returns: If sending to an address: (bool) true if payment successfully scheduled @@ -2612,6 +2722,11 @@ class Daemon(AuthJSONRPCServer): (<num_addresses> | --num_addresses=<num_addresses>) (<amount> | --amount=<amount>) + Options: + --no_broadcast : (bool) whether to broadcast or not + --num_addresses=<num_addresses> : (int) num of addresses to create + --amount=<amount> : (float) initial amount in each address + Returns: (dict) the resulting transaction """ @@ -2635,6 +2750,9 @@ class Daemon(AuthJSONRPCServer): Usage: utxo_list + Options: + None + Returns: (list) List of unspent transaction outputs (UTXOs) [ @@ -2671,8 +2789,8 @@ class Daemon(AuthJSONRPCServer): block_show (<blockhash> | --blockhash=<blockhash>) | (<height> | --height=<height>) Options: - <blockhash>, --blockhash=<blockhash> : hash of the block to look up - <height>, --height=<height> : height of the block to look up + --blockhash=<blockhash> : (str) hash of the block to look up + --height=<height> : (int) height of the block to look up Returns: (dict) Requested block @@ -2701,17 +2819,18 @@ class Daemon(AuthJSONRPCServer): [--encoding=<encoding>] [--payment_rate_manager=<payment_rate_manager>] Options: - --timeout=<timeout> : timeout in number of seconds - --encoding=<encoding> : by default no attempt at decoding is made, - can be set to one of the + --blob_hash=<blob_hash> : (str) blob hash of the blob to get + --timeout=<timeout> : (int) timeout in number of seconds + --encoding=<encoding> : (str) by default no attempt at decoding + is made, can be set to one of the following decoders: 'json' - 
--payment_rate_manager=<payment_rate_manager> : if not given the default payment rate + --payment_rate_manager=<payment_rate_manager> : (str) if not given the default payment rate manager will be used. supported alternative rate managers: 'only-free' - Returns + Returns: (str) Success/Fail message or (dict) decoded data """ @@ -2742,6 +2861,9 @@ class Daemon(AuthJSONRPCServer): Usage: blob_delete (<blob_hash> | --blob_hash=<blob_hash) + Options: + --blob_hash=<blob_hash> : (str) blob hash of the blob to delete + Returns: (str) Success/fail message """ @@ -2766,7 +2888,8 @@ class Daemon(AuthJSONRPCServer): peer_list (<blob_hash> | --blob_hash=<blob_hash>) [<timeout> | --timeout=<timeout>] Options: - <timeout>, --timeout=<timeout> : peer search timeout in seconds + --blob_hash=<blob_hash> : (str) find available peers for this blob hash + --timeout=<timeout> : (int) peer search timeout in seconds Returns: (list) List of contacts @@ -2780,24 +2903,23 @@ class Daemon(AuthJSONRPCServer): return d @defer.inlineCallbacks - @AuthJSONRPCServer.flags(announce_all="-a") def jsonrpc_blob_announce(self, announce_all=None, blob_hash=None, stream_hash=None, sd_hash=None): """ Announce blobs to the DHT Usage: - blob_announce [-a] [<blob_hash> | --blob_hash=<blob_hash>] + blob_announce [--announce_all] [<blob_hash> | --blob_hash=<blob_hash>] [<stream_hash> | --stream_hash=<stream_hash>] [<sd_hash> | --sd_hash=<sd_hash>] Options: - -a : announce all the blobs possessed by user - <blob_hash>, --blob_hash=<blob_hash> : announce a blob, specified by blob_hash - <stream_hash>, --stream_hash=<stream_hash> : announce all blobs associated with - stream_hash - <sd_hash>, --sd_hash=<sd_hash> : announce all blobs associated with - sd_hash and the sd_hash itself + --announce_all=<announce_all> : (bool) announce all the blobs possessed by user + --blob_hash=<blob_hash> : (str) announce a blob, specified by blob_hash + --stream_hash=<stream_hash> : (str) announce all blobs associated with + stream_hash + --sd_hash=<sd_hash> : (str) announce all blobs associated with + sd_hash and the sd_hash itself Returns: (bool) true if successful @@ -2830,6 +2952,9 @@ class Daemon(AuthJSONRPCServer): Usage: blob_announce_all + Options: + None + Returns: (str) Success/fail message """ @@ -2846,12 +2971,12 @@ class Daemon(AuthJSONRPCServer): [--reflector=<reflector>] Options: - --sd_hash=<sd_hash> : get file with matching sd hash - --file_name=<file_name> : get file with matching file name in the + --sd_hash=<sd_hash> : (str) get file with matching sd hash + --file_name=<file_name> : (str) get file with matching file name in the downloads folder - --stream_hash=<stream_hash> : get file with matching stream hash - --rowid=<rowid> : get file with matching row id - --reflector=<reflector> : reflector server, ip address or url + --stream_hash=<stream_hash> : (str) get file with matching stream hash + --rowid=<rowid> : (int) get file with matching row id + --reflector=<reflector> : (str) reflector server, ip address or url by default choose a server from the config Returns: @@ -2871,25 +2996,26 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(results) @defer.inlineCallbacks - @AuthJSONRPCServer.flags(needed="-n", finished="-f") def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None, finished=None, page_size=None, page=None): """ Returns blob hashes. 
If not given filters, returns all blobs known by the blob manager Usage: - blob_list [-n] [-f] [<uri> | --uri=<uri>] [<stream_hash> | --stream_hash=<stream_hash>] - [<sd_hash> | --sd_hash=<sd_hash>] [<page_size> | --page_size=<page_size>] + blob_list [--needed] [--finished] [<uri> | --uri=<uri>] + [<stream_hash> | --stream_hash=<stream_hash>] + [<sd_hash> | --sd_hash=<sd_hash>] + [<page_size> | --page_size=<page_size>] [<page> | --page=<page>] Options: - -n : only return needed blobs - -f : only return finished blobs - <uri>, --uri=<uri> : filter blobs by stream in a uri - <stream_hash>, --stream_hash=<stream_hash> : filter blobs by stream hash - <sd_hash>, --sd_hash=<sd_hash> : filter blobs by sd hash - <page_size>, --page_size=<page_size> : results page size - <page>, --page=<page> : page of results to return + --needed : (bool) only return needed blobs + --finished : (bool) only return finished blobs + --uri=<uri> : (str) filter blobs by stream in a uri + --stream_hash=<stream_hash> : (str) filter blobs by stream hash + --sd_hash=<sd_hash> : (str) filter blobs by sd hash + --page_size=<page_size> : (int) results page size + --page=<page> : (int) page of results to return Returns: (list) List of blob hashes @@ -2935,6 +3061,9 @@ class Daemon(AuthJSONRPCServer): Usage: blob_reflect_all + Options: + None + Returns: (bool) true if successful """ @@ -2951,6 +3080,9 @@ class Daemon(AuthJSONRPCServer): Usage: routing_table_get + Options: + None + Returns: (dict) dictionary containing routing and contact information { @@ -3028,8 +3160,10 @@ class Daemon(AuthJSONRPCServer): [<blob_timeout> | --blob_timeout=<blob_timeout>] Options: - <search_timeout> : how long to search for peers for the blob in the dht - <blob_timeout> : how long to try downloading from a peer + --blob_hash=<blob_hash> : (str) check availability for this blob hash + --search_timeout=<search_timeout> : (int) how long to search for peers for the blob + in the dht + --blob_timeout=<blob_timeout> : (int) how long to try downloading from a peer Returns: (dict) { @@ -3051,8 +3185,9 @@ class Daemon(AuthJSONRPCServer): [<peer_timeout> | --peer_timeout=<peer_timeout>] Options: - <sd_timeout>, --sd_timeout=<sd_timeout> : sd blob download timeout - <peer_timeout>, --peer_timeout=<peer_timeout> : how long to look for peers + --uri=<uri> : (str) check availability for this uri + --sd_timeout=<sd_timeout> : (int) sd blob download timeout + --peer_timeout=<peer_timeout> : (int) how long to look for peers Returns: (float) Peers per blob / total blobs @@ -3066,12 +3201,15 @@ class Daemon(AuthJSONRPCServer): Get stream availability for lbry uri Usage: - stream_availability (<uri>) [<search_timeout> | --search_timeout=<search_timeout>] + stream_availability (<uri> | --uri=<uri>) + [<search_timeout> | --search_timeout=<search_timeout>] [<blob_timeout> | --blob_timeout=<blob_timeout>] Options: - <search_timeout> : how long to search for peers for the blob in the dht - <blob_timeout> : how long to try downloading from a peer + --uri=<uri> : (str) check availability for this uri + --search_timeout=<search_timeout> : (int) how long to search for peers for the blob + in the dht + --search_timeout=<blob_timeout> : (int) how long to try downloading from a peer Returns: (dict) { @@ -3160,21 +3298,22 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(response) @defer.inlineCallbacks - @AuthJSONRPCServer.flags(a_arg='-a', b_arg='-b') def jsonrpc_cli_test_command(self, pos_arg, pos_args=[], pos_arg2=None, pos_arg3=None, a_arg=False, b_arg=False): """ This 
command is only for testing the CLI argument parsing Usage: - cli_test_command [-a] [-b] (<pos_arg> | --pos_arg=<pos_arg>) + cli_test_command [--a_arg] [--b_arg] (<pos_arg> | --pos_arg=<pos_arg>) [<pos_args>...] [--pos_arg2=<pos_arg2>] [--pos_arg3=<pos_arg3>] Options: - -a, --a_arg : a arg - -b, --b_arg : b arg - <pos_arg2>, --pos_arg2=<pos_arg2> : pos arg 2 - <pos_arg3>, --pos_arg3=<pos_arg3> : pos arg 3 + --a_arg : a arg + --b_arg : b arg + --pos_arg=<pos_arg> : pos arg + --pos_args=<pos_args> : pos args + --pos_arg2=<pos_arg2> : pos arg 2 + --pos_arg3=<pos_arg3> : pos arg 3 Returns: pos args """ diff --git a/mkdocs.yml b/mkdocs.yml index 6a700297c..1455aa742 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -3,7 +3,7 @@ repo_url: https://github.com/lbryio/lbry pages: - "API": index.md - "CLI": cli.md -theme: material +theme: material site_dir: docs_build google_analytics: - 'UA-60403362-1' diff --git a/scripts/gen_api_docs.py b/scripts/gen_api_docs.py index b13cbf6e0..0363f35ca 100644 --- a/scripts/gen_api_docs.py +++ b/scripts/gen_api_docs.py @@ -5,63 +5,99 @@ # Push docs: mkdocs gh-deploy import inspect -import os.path as op -import re import sys - +import re +import os.path as op +from tabulate import tabulate from lbrynet.daemon.Daemon import Daemon - -def _name(obj): - if hasattr(obj, '__name__'): - return obj.__name__ - elif inspect.isdatadescriptor(obj): - return obj.fget.__name__ +INDENT = " " +REQD_CMD_REGEX = r"\(.*?=<(?P<reqd>.*?)>\)" +OPT_CMD_REGEX = r"\[.*?=<(?P<opt>.*?)>\]" +CMD_REGEX = r"--.*?(?P<cmd>.*?)[=,\s,<]" -def _anchor(name): - anchor = name.lower().replace(' ', '-') - anchor = re.sub(r'[^\w\- ]', '', anchor) - return anchor +def _tabulate_options(_options_docstr, method, reqd_matches, opt_matches): + _option_list = [] + for line in _options_docstr.splitlines(): + if (line.strip().startswith("--")): + # separates command name and description + parts = line.split(":", 1) + # checks whether the command is optional or required + # and remove the cli type formatting and convert to + # api style formatitng + match = re.findall(CMD_REGEX, parts[0]) -_docstring_header_pattern = re.compile(r'^([^\n]+)\n[\-\=]{3,}$', flags=re.MULTILINE) -_docstring_parameters_pattern = re.compile(r'^([^ \n]+) \: ([^\n]+)$', flags=re.MULTILINE) + if match[0] not in reqd_matches: + parts[0] = "'" + match[0] + "' (optional)" + else: + parts[0] = "'" + match[0] + "'" + # separates command type(in brackets) and description + new_parts = parts[1].lstrip().split(" ", 1) + else: + parts = [line] -def _replace_docstring_header(paragraph): - """Process NumPy-like function docstrings.""" + # len will be 2 when there's cmd name and description + if len(parts) == 2: + _option_list.append([parts[0], ":", new_parts[0], new_parts[1]]) + # len will be 1 when there's continuation of multiline description in the next line + # check `blob_announce`'s `stream_hash` command + elif len(parts) == 1: + _option_list.append([None, None, None, parts[0]]) + else: + print "Error: Ill formatted doc string for {}".format(method) + print "Error causing line: {}".format(line) - # Replace Markdown headers in docstrings with light headers in bold. 
- paragraph = re.sub(_docstring_header_pattern, r'*\1*', paragraph) - paragraph = re.sub(_docstring_parameters_pattern, r'\n* `\1` (\2)\n', paragraph) - return paragraph + # tabulate to make the options look pretty + _options_docstr_no_indent = tabulate(_option_list, missingval="", tablefmt="plain") + + # tabulate to make the options look pretty + _options_docstr = "" + for line in _options_docstr_no_indent.splitlines(): + _options_docstr += INDENT + line + '\n' + + return _options_docstr def _doc(obj): docstr = (inspect.getdoc(obj) or '').strip() - return _replace_docstring_header(docstr) + try: + _desc, _docstr_after_desc = docstr.split("Usage:", 1) + _usage_docstr, _docstr_after_options = _docstr_after_desc.split("Options:", 1) + _options_docstr, _returns_docstr = _docstr_after_options.split("Returns:", 1) + except(ValueError): + print "Error: Ill formatted doc string for {}".format(obj) + return "Error!" -def _link(name, anchor=None): - return "[{name}](#{anchor})".format(name=name, anchor=anchor or _anchor(name)) + opt_matches = re.findall(OPT_CMD_REGEX, _usage_docstr) + reqd_matches = re.findall(REQD_CMD_REGEX, _usage_docstr) + + _options_docstr = _tabulate_options(_options_docstr.strip(), obj, reqd_matches, opt_matches) + + docstr = _desc + \ + "Args:\n" + \ + _options_docstr + \ + "\nReturns:" + \ + _returns_docstr + + return docstr def main(): curdir = op.dirname(op.realpath(__file__)) - cli_doc_path = op.realpath(op.join(curdir, '..', 'docs', 'cli.md')) + api_doc_path = op.realpath(op.join(curdir, '..', 'docs', 'index.md')) - # toc = '' - doc = '' - # Table of contents + docs = '' for method_name in sorted(Daemon.callable_methods.keys()): method = Daemon.callable_methods[method_name] - # toc += '* ' + _link(method_name, _anchor(method_name)) + "\n" - doc += '## ' + method_name + "\n\n```text\n" + _doc(method) + "\n```\n\n" + docs += '## ' + method_name + "\n\n```text\n" + _doc(method) + "\n```\n\n" - text = "# LBRY Command Line Documentation\n\n" + doc - with open(cli_doc_path, 'w+') as f: - f.write(text) + docs = "# LBRY JSON-RPC API Documentation\n\n" + docs + with open(api_doc_path, 'w+') as f: + f.write(docs) if __name__ == '__main__': diff --git a/scripts/gen_cli_docs.py b/scripts/gen_cli_docs.py new file mode 100644 index 000000000..f6d34bcbc --- /dev/null +++ b/scripts/gen_cli_docs.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- + +# Generate docs: python gen_api_docs.py +# See docs: pip install mkdocs; mkdocs serve +# Push docs: mkdocs gh-deploy + +import inspect +import os.path as op +import sys +from tabulate import tabulate +from lbrynet.daemon.Daemon import Daemon + +INDENT = " " + + +def _tabulate_options(_options_docstr, method): + _option_list = [] + for line in _options_docstr.splitlines(): + if (line.strip().startswith("--")): + # separates command name and description + parts = line.split(":", 1) + # separates command type(in brackets) and description + new_parts = parts[1].lstrip().split(" ", 1) + else: + parts = [line] + + # len will be 2 when there's cmd name and description + if len(parts) == 2: + _option_list.append([parts[0], ":", new_parts[0], new_parts[1]]) + # len will be 1 when there's continuation of multiline description in the next line + # check `blob_announce`'s `stream_hash` command + elif len(parts) == 1: + _option_list.append([None, None, None, parts[0]]) + else: + print "Error: Ill formatted doc string for {}".format(method) + print "Error causing line: {}".format(line) + + # tabulate to make the options look pretty + _options_docstr_no_indent = 
tabulate(_option_list, missingval="", tablefmt="plain") + + # Indent the options properly + _options_docstr = "" + for line in _options_docstr_no_indent.splitlines(): + _options_docstr += INDENT + line + '\n' + + return _options_docstr + + +def _doc(obj): + docstr = (inspect.getdoc(obj) or '').strip() + + try: + _usage_docstr, _docstr_after_options = docstr.split("Options:", 1) + _options_docstr, _returns_docstr = _docstr_after_options.split("Returns:", 1) + except(ValueError): + print "Error: Ill formatted doc string for {}".format(obj) + return "Error!" + + _options_docstr = _tabulate_options(_options_docstr.strip(), obj) + + docstr = _usage_docstr + \ + "\nOptions:\n" + \ + _options_docstr + \ + "\nReturns:" + \ + _returns_docstr + + return docstr + + +def main(): + curdir = op.dirname(op.realpath(__file__)) + cli_doc_path = op.realpath(op.join(curdir, '..', 'docs', 'cli.md')) + + docs = '' + for method_name in sorted(Daemon.callable_methods.keys()): + method = Daemon.callable_methods[method_name] + docs += '## ' + method_name + "\n\n```text\n" + _doc(method) + "\n```\n\n" + + docs = "# LBRY Command Line Documentation\n\n" + docs + with open(cli_doc_path, 'w+') as f: + f.write(docs) + + +if __name__ == '__main__': + sys.exit(main()) From 14c01b095bd75a36f893d84f4feb22dfd57656b0 Mon Sep 17 00:00:00 2001 From: hackrush <atulshyan96@gmail.com> Date: Sun, 25 Feb 2018 23:27:41 +0530 Subject: [PATCH 2/5] Added tests for removing short args in cli --- lbrynet/tests/integration/test_integration.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/lbrynet/tests/integration/test_integration.py b/lbrynet/tests/integration/test_integration.py index f0f07f6a9..1b09f1e20 100644 --- a/lbrynet/tests/integration/test_integration.py +++ b/lbrynet/tests/integration/test_integration.py @@ -52,6 +52,7 @@ class TestIntegration(unittest.TestCase): def tearDownClass(cls): shell_command(['lbrynet-cli', 'daemon_stop']) + def test_cli(self): help_out,err = lbrynet_cli(['help']) self.assertTrue(help_out) @@ -66,6 +67,7 @@ class TestIntegration(unittest.TestCase): out = json.loads(out) self.assertTrue(out['is_running']) + def test_cli_docopts(self): out,err = lbrynet_cli(['cli_test_command']) self.assertEqual('',out) @@ -92,18 +94,23 @@ class TestIntegration(unittest.TestCase): # TODO: variable length arguments don't have guess_type() on them self.assertEqual([1,['2','3'],None,None,False,False], out) - out,err = lbrynet_cli(['cli_test_command','1','-a']) - out = json.loads(out) - self.assertEqual([1,[],None,None,True,False], out) - out,err = lbrynet_cli(['cli_test_command','1','--a_arg']) out = json.loads(out) self.assertEqual([1,[],None,None,True,False], out) - out,err = lbrynet_cli(['cli_test_command','1','-a','-b']) + out,err = lbrynet_cli(['cli_test_command','1','--a_arg', '--b_arg']) out = json.loads(out) self.assertEqual([1,[],None,None,True,True], out) + + def test_cli_docopts_with_short_args(self): + out,err = lbrynet_cli(['cli_test_command','1','-a']) + self.assertRaises(ValueError, json.loads, out) + + out,err = lbrynet_cli(['cli_test_command','1','-a','-b']) + self.assertRaises(ValueError, json.loads, out) + + def test_status(self): out = lbrynet.status() self.assertTrue(out['is_running']) From 30bfb9ac882db9a78bfa4639194e90435460f13c Mon Sep 17 00:00:00 2001 From: hackrush <atulshyan96@gmail.com> Date: Wed, 28 Feb 2018 14:29:35 +0530 Subject: [PATCH 3/5] Review and script fixes --- CHANGELOG.md | 4 +- docs/404.html | 315 ++ docs/assets/images/favicon.png | Bin 0 -> 521 
bytes .../images/icons/bitbucket.4ebea66e.svg | 20 + docs/assets/images/icons/github.a4034fb1.svg | 18 + docs/assets/images/icons/gitlab.d80e5efc.svg | 38 + .../javascripts/application.02434462.js | 1 + docs/assets/javascripts/lunr/lunr.da.js | 1 + docs/assets/javascripts/lunr/lunr.de.js | 1 + docs/assets/javascripts/lunr/lunr.du.js | 1 + docs/assets/javascripts/lunr/lunr.es.js | 1 + docs/assets/javascripts/lunr/lunr.fi.js | 1 + docs/assets/javascripts/lunr/lunr.fr.js | 1 + docs/assets/javascripts/lunr/lunr.hu.js | 1 + docs/assets/javascripts/lunr/lunr.it.js | 1 + docs/assets/javascripts/lunr/lunr.jp.js | 1 + docs/assets/javascripts/lunr/lunr.multi.js | 1 + docs/assets/javascripts/lunr/lunr.no.js | 1 + docs/assets/javascripts/lunr/lunr.pt.js | 1 + docs/assets/javascripts/lunr/lunr.ro.js | 1 + docs/assets/javascripts/lunr/lunr.ru.js | 1 + .../javascripts/lunr/lunr.stemmer.support.js | 1 + docs/assets/javascripts/lunr/lunr.sv.js | 1 + docs/assets/javascripts/lunr/lunr.tr.js | 1 + docs/assets/javascripts/lunr/tinyseg.js | 1 + docs/assets/javascripts/modernizr.1aa3b519.js | 1 + .../application-palette.6079476c.css | 2 + .../stylesheets/application.78aab2dc.css | 2 + docs/cli.md | 1483 ---------- docs/cli/index.html | 2530 +++++++++++++++++ docs/index.html | 2262 +++++++++++++++ docs/index.md | 1215 -------- docs/search/search_index.json | 564 ++++ mkdocs.yml | 3 +- scripts/gen_api_docs.py | 5 +- scripts/gen_cli_docs.py | 7 +- scripts/gen_docs.py | 10 + 37 files changed, 5792 insertions(+), 2706 deletions(-) create mode 100644 docs/404.html create mode 100644 docs/assets/images/favicon.png create mode 100644 docs/assets/images/icons/bitbucket.4ebea66e.svg create mode 100644 docs/assets/images/icons/github.a4034fb1.svg create mode 100644 docs/assets/images/icons/gitlab.d80e5efc.svg create mode 100644 docs/assets/javascripts/application.02434462.js create mode 100644 docs/assets/javascripts/lunr/lunr.da.js create mode 100644 docs/assets/javascripts/lunr/lunr.de.js create mode 100644 docs/assets/javascripts/lunr/lunr.du.js create mode 100644 docs/assets/javascripts/lunr/lunr.es.js create mode 100644 docs/assets/javascripts/lunr/lunr.fi.js create mode 100644 docs/assets/javascripts/lunr/lunr.fr.js create mode 100644 docs/assets/javascripts/lunr/lunr.hu.js create mode 100644 docs/assets/javascripts/lunr/lunr.it.js create mode 100644 docs/assets/javascripts/lunr/lunr.jp.js create mode 100644 docs/assets/javascripts/lunr/lunr.multi.js create mode 100644 docs/assets/javascripts/lunr/lunr.no.js create mode 100644 docs/assets/javascripts/lunr/lunr.pt.js create mode 100644 docs/assets/javascripts/lunr/lunr.ro.js create mode 100644 docs/assets/javascripts/lunr/lunr.ru.js create mode 100644 docs/assets/javascripts/lunr/lunr.stemmer.support.js create mode 100644 docs/assets/javascripts/lunr/lunr.sv.js create mode 100644 docs/assets/javascripts/lunr/lunr.tr.js create mode 100644 docs/assets/javascripts/lunr/tinyseg.js create mode 100644 docs/assets/javascripts/modernizr.1aa3b519.js create mode 100644 docs/assets/stylesheets/application-palette.6079476c.css create mode 100644 docs/assets/stylesheets/application.78aab2dc.css delete mode 100644 docs/cli.md create mode 100644 docs/cli/index.html create mode 100644 docs/index.html delete mode 100644 docs/index.md create mode 100644 docs/search/search_index.json create mode 100644 scripts/gen_docs.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 9e360faa6..75ea8d158 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,7 +29,7 @@ at anytime. 
* daemon not erring properly for non-numeric values being passed to the `bid` parameter for the `publish` method * `publish` command to allow updating claims with a `bid` amount higher than the wallet balance, so long as the amount is less than the wallet balance plus the bid amount of the claim being updated (https://github.com/lbryio/lbry/issues/748) * incorrect `blob_num` for the stream terminator blob, which would result in creating invalid streams. Such invalid streams are detected on startup and are automatically removed (https://github.com/lbryio/lbry/issues/1124) - * fixed the inconsistencies in docstrings + * fixed the inconsistencies in API and CLI docstrings * ### Deprecated @@ -149,7 +149,7 @@ at anytime. * unnecessary `TempBlobManager` class * old storage classes used by the file manager, wallet, and blob manager * old `.db` database files from the data directory - * short(single dashed) arguments + * short(single dashed) arguments for `lbrynet-cli` ## [0.18.0] - 2017-11-08 ### Fixed diff --git a/docs/404.html b/docs/404.html new file mode 100644 index 000000000..198f74467 --- /dev/null +++ b/docs/404.html @@ -0,0 +1,315 @@ + + + + +<!DOCTYPE html> +<html lang="en" class="no-js"> + <head> + + <meta charset="utf-8"> + <meta name="viewport" content="width=device-width,initial-scale=1"> + <meta http-equiv="x-ua-compatible" content="ie=edge"> + + + + + <meta name="lang:clipboard.copy" content="Copy to clipboard"> + + <meta name="lang:clipboard.copied" content="Copied to clipboard"> + + <meta name="lang:search.language" content="en"> + + <meta name="lang:search.pipeline.stopwords" content="True"> + + <meta name="lang:search.pipeline.trimmer" content="True"> + + <meta name="lang:search.result.none" content="No matching documents"> + + <meta name="lang:search.result.one" content="1 matching document"> + + <meta name="lang:search.result.other" content="# matching documents"> + + <meta name="lang:search.tokenizer" content="[\s\-]+"> + + <link rel="shortcut icon" href="/assets/images/favicon.png"> + <meta name="generator" content="mkdocs-0.17.2, mkdocs-material-2.6.6"> + + + + <title>LBRY + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

404 - Not found

+ + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/assets/images/favicon.png b/docs/assets/images/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..76d17f57ad903c3ea2f1b564cafb95bf9af84ee3 GIT binary patch literal 521 zcmV+k0`~ohP)kdg0005dNkl2WptjAn6@db&Pvy?U$ zv>P|<&rCZfZF0jmq0opf8)91(A<*iIVPPJJT((+JiF~>9KAA3%heFdnI;SaK+~|aU zQ~!x`%y{jX1<~SK2RxN7Db8`yWBbf6p7&07{VXfaam*cUs&eu*Zu(xaIL8rP){;a< zS~$}^Td32Rw+W1TqTd|L{#~jJet4!qwKsb5hq%YXiiUV!yH=ltu0>s|FLsT+Iy7K~ z!6*Z0a@vQ;AiZo!=s{{fqR+ct6YQPzbk+j}*qe7vtu39I7 zrOtZqU}=NnLchJxsU9iY+}3TYDl|BvPsX%E@dlyLgdV%q$UP|Y?DfcGb`}K&$;drd z+hL;zy7UTccUYU+h`ONIU|d=%`(0$=KW4%tVWXj~AE + + diff --git a/docs/assets/images/icons/github.a4034fb1.svg b/docs/assets/images/icons/github.a4034fb1.svg new file mode 100644 index 000000000..f8944b015 --- /dev/null +++ b/docs/assets/images/icons/github.a4034fb1.svg @@ -0,0 +1,18 @@ + + + diff --git a/docs/assets/images/icons/gitlab.d80e5efc.svg b/docs/assets/images/icons/gitlab.d80e5efc.svg new file mode 100644 index 000000000..cda66137a --- /dev/null +++ b/docs/assets/images/icons/gitlab.d80e5efc.svg @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/assets/javascripts/application.02434462.js b/docs/assets/javascripts/application.02434462.js new file mode 100644 index 000000000..b05d8dbb5 --- /dev/null +++ b/docs/assets/javascripts/application.02434462.js @@ -0,0 +1 @@ +!function(e,t){for(var n in t)e[n]=t[n]}(window,function(e){function t(r){if(n[r])return n[r].exports;var i=n[r]={i:r,l:!1,exports:{}};return e[r].call(i.exports,i,i.exports,t),i.l=!0,i.exports}var n={};return t.m=e,t.c=n,t.d=function(e,n,r){t.o(e,n)||Object.defineProperty(e,n,{configurable:!1,enumerable:!0,get:r})},t.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return t.d(n,"a",n),n},t.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},t.p="",t(t.s=6)}([function(e,t,n){"use strict";t.__esModule=!0,t.default={createElement:function(e,t){var n=document.createElement(e);t&&Array.prototype.forEach.call(Object.keys(t),function(e){n.setAttribute(e,t[e])});for(var r=arguments.length,i=Array(r>2?r-2:0),o=2;o pre, pre > code");Array.prototype.forEach.call(n,function(t,n){var r="__code_"+n,i=e.createElement("button",{class:"md-clipboard",title:h("clipboard.copy"),"data-clipboard-target":"#"+r+" pre, #"+r+" code"},e.createElement("span",{class:"md-clipboard__message"})),o=t.parentNode;o.id=r,o.insertBefore(i,t)});new c.default(".md-clipboard").on("success",function(e){var t=e.trigger.querySelector(".md-clipboard__message");if(!(t instanceof HTMLElement))throw new ReferenceError;e.clearSelection(),t.dataset.mdTimer&&clearTimeout(parseInt(t.dataset.mdTimer,10)),t.classList.add("md-clipboard__message--active"),t.innerHTML=h("clipboard.copied"),t.dataset.mdTimer=setTimeout(function(){t.classList.remove("md-clipboard__message--active"),t.dataset.mdTimer=""},2e3).toString()})}if(!Modernizr.details){var r=document.querySelectorAll("details > summary");Array.prototype.forEach.call(r,function(e){e.addEventListener("click",function(e){var t=e.target.parentNode;t.hasAttribute("open")?t.removeAttribute("open"):t.setAttribute("open","")})})}var i=function(){if(document.location.hash){var e=document.getElementById(document.location.hash.substring(1));if(!e)return;for(var t=e.parentNode;t&&!(t instanceof HTMLDetailsElement);)t=t.parentNode;if(t&&!t.open){t.open=!0;var n=location.hash;location.hash=" 
",location.hash=n}}};if(window.addEventListener("hashchange",i),i(),Modernizr.ios){var o=document.querySelectorAll("[data-md-scrollfix]");Array.prototype.forEach.call(o,function(e){e.addEventListener("touchstart",function(){var t=e.scrollTop;0===t?e.scrollTop=1:t+e.offsetHeight===e.scrollHeight&&(e.scrollTop=t-1)})})}}).listen(),new f.default.Event.Listener(window,["scroll","resize","orientationchange"],new f.default.Header.Shadow("[data-md-component=container]","[data-md-component=header]")).listen(),new f.default.Event.Listener(window,["scroll","resize","orientationchange"],new f.default.Header.Title("[data-md-component=title]",".md-typeset h1")).listen(),document.querySelector("[data-md-component=hero]")&&new f.default.Event.Listener(window,["scroll","resize","orientationchange"],new f.default.Tabs.Toggle("[data-md-component=hero]")).listen(),document.querySelector("[data-md-component=tabs]")&&new f.default.Event.Listener(window,["scroll","resize","orientationchange"],new f.default.Tabs.Toggle("[data-md-component=tabs]")).listen(),new f.default.Event.MatchMedia("(min-width: 1220px)",new f.default.Event.Listener(window,["scroll","resize","orientationchange"],new f.default.Sidebar.Position("[data-md-component=navigation]","[data-md-component=header]"))),document.querySelector("[data-md-component=toc]")&&new f.default.Event.MatchMedia("(min-width: 960px)",new f.default.Event.Listener(window,["scroll","resize","orientationchange"],new f.default.Sidebar.Position("[data-md-component=toc]","[data-md-component=header]"))),new f.default.Event.MatchMedia("(min-width: 960px)",new f.default.Event.Listener(window,"scroll",new f.default.Nav.Blur("[data-md-component=toc] [href]")));var n=document.querySelectorAll("[data-md-component=collapsible]");Array.prototype.forEach.call(n,function(e){new f.default.Event.MatchMedia("(min-width: 1220px)",new f.default.Event.Listener(e.previousElementSibling,"click",new f.default.Nav.Collapse(e)))}),new f.default.Event.MatchMedia("(max-width: 1219px)",new f.default.Event.Listener("[data-md-component=navigation] [data-md-toggle]","change",new f.default.Nav.Scrolling("[data-md-component=navigation] nav"))),document.querySelector("[data-md-component=search]")&&(new f.default.Event.MatchMedia("(max-width: 959px)",new f.default.Event.Listener("[data-md-toggle=search]","change",new f.default.Search.Lock("[data-md-toggle=search]"))),new f.default.Event.Listener("[data-md-component=query]",["focus","keyup","change"],new f.default.Search.Result("[data-md-component=result]",function(){return fetch(t.url.base+"/"+(t.version<"0.17"?"mkdocs":"search")+"/search_index.json",{credentials:"same-origin"}).then(function(e){return e.json()}).then(function(e){return e.docs.map(function(e){return e.location=t.url.base+e.location,e})})})).listen(),new f.default.Event.Listener("[data-md-component=reset]","click",function(){setTimeout(function(){var e=document.querySelector("[data-md-component=query]");if(!(e instanceof HTMLInputElement))throw new ReferenceError;e.focus()},10)}).listen(),new f.default.Event.Listener("[data-md-toggle=search]","change",function(e){setTimeout(function(e){if(!(e instanceof HTMLInputElement))throw new ReferenceError;if(e.checked){var t=document.querySelector("[data-md-component=query]");if(!(t instanceof HTMLInputElement))throw new ReferenceError;t.focus()}},400,e.target)}).listen(),new f.default.Event.MatchMedia("(min-width: 960px)",new f.default.Event.Listener("[data-md-component=query]","focus",function(){var 
e=document.querySelector("[data-md-toggle=search]");if(!(e instanceof HTMLInputElement))throw new ReferenceError;e.checked||(e.checked=!0,e.dispatchEvent(new CustomEvent("change")))})),new f.default.Event.Listener(window,"keydown",function(e){var t=document.querySelector("[data-md-toggle=search]");if(!(t instanceof HTMLInputElement))throw new ReferenceError;var n=document.querySelector("[data-md-component=query]");if(!(n instanceof HTMLInputElement))throw new ReferenceError;if(!e.metaKey&&!e.ctrlKey)if(t.checked){if(13===e.keyCode){if(n===document.activeElement){e.preventDefault();var r=document.querySelector("[data-md-component=search] [href][data-md-state=active]");r instanceof HTMLLinkElement&&(window.location=r.getAttribute("href"),t.checked=!1,t.dispatchEvent(new CustomEvent("change")),n.blur())}}else if(9===e.keyCode||27===e.keyCode)t.checked=!1,t.dispatchEvent(new CustomEvent("change")),n.blur();else if(-1!==[8,37,39].indexOf(e.keyCode))n!==document.activeElement&&n.focus();else if(-1!==[38,40].indexOf(e.keyCode)){var i=e.keyCode,o=Array.prototype.slice.call(document.querySelectorAll("[data-md-component=query], [data-md-component=search] [href]")),a=o.find(function(e){if(!(e instanceof HTMLElement))throw new ReferenceError;return"active"===e.dataset.mdState});a&&(a.dataset.mdState="");var s=Math.max(0,(o.indexOf(a)+o.length+(38===i?-1:1))%o.length);return o[s]&&(o[s].dataset.mdState="active",o[s].focus()),e.preventDefault(),e.stopPropagation(),!1}}else document.activeElement&&!document.activeElement.form&&(70!==e.keyCode&&83!==e.keyCode||(n.focus(),e.preventDefault()))}).listen(),new f.default.Event.Listener(window,"keypress",function(){var e=document.querySelector("[data-md-toggle=search]");if(!(e instanceof HTMLInputElement))throw new ReferenceError;if(e.checked){var t=document.querySelector("[data-md-component=query]");if(!(t instanceof HTMLInputElement))throw new ReferenceError;t!==document.activeElement&&t.focus()}}).listen()),new f.default.Event.Listener(document.body,"keydown",function(e){if(9===e.keyCode){var t=document.querySelectorAll("[data-md-component=navigation] .md-nav__link[for]:not([tabindex])");Array.prototype.forEach.call(t,function(e){e.offsetHeight&&(e.tabIndex=0)})}}).listen(),new f.default.Event.Listener(document.body,"mousedown",function(){var e=document.querySelectorAll("[data-md-component=navigation] .md-nav__link[tabindex]");Array.prototype.forEach.call(e,function(e){e.removeAttribute("tabIndex")})}).listen(),document.body.addEventListener("click",function(){"tabbing"===document.body.dataset.mdState&&(document.body.dataset.mdState="")}),new f.default.Event.MatchMedia("(max-width: 959px)",new f.default.Event.Listener("[data-md-component=navigation] [href^='#']","click",function(){var e=document.querySelector("[data-md-toggle=drawer]");if(!(e instanceof HTMLInputElement))throw new ReferenceError;e.checked&&(e.checked=!1,e.dispatchEvent(new CustomEvent("change")))})),function(){var e=document.querySelector("[data-md-source]");if(!e)return a.default.resolve([]);if(!(e instanceof HTMLAnchorElement))throw new ReferenceError;switch(e.dataset.mdSource){case"github":return new f.default.Source.Adapter.GitHub(e).fetch();default:return a.default.resolve([])}}().then(function(e){var t=document.querySelectorAll("[data-md-source]");Array.prototype.forEach.call(t,function(t){new f.default.Source.Repository(t).initialize(e)})})}t.__esModule=!0,t.app=void 0,n(7),n(8),n(9),n(10),n(11),n(12),n(13);var 
o=n(14),a=r(o),s=n(18),c=r(s),u=n(26),l=r(u),d=n(27),f=r(d);window.Promise=window.Promise||a.default;var h=function(e){var t=document.getElementsByName("lang:"+e)[0];if(!(t instanceof HTMLMetaElement))throw new ReferenceError;return t.content},p={initialize:i};t.app=p}).call(t,n(0))},function(e,t,n){e.exports=n.p+"assets/images/icons/bitbucket.4ebea66e.svg"},function(e,t,n){e.exports=n.p+"assets/images/icons/github.a4034fb1.svg"},function(e,t,n){e.exports=n.p+"assets/images/icons/gitlab.d80e5efc.svg"},function(e,t){},function(e,t){},function(e,t){try{var n=new window.CustomEvent("test");if(n.preventDefault(),!0!==n.defaultPrevented)throw new Error("Could not prevent default")}catch(e){var r=function(e,t){var n,r;return t=t||{bubbles:!1,cancelable:!1,detail:void 0},n=document.createEvent("CustomEvent"),n.initCustomEvent(e,t.bubbles,t.cancelable,t.detail),r=n.preventDefault,n.preventDefault=function(){r.call(this);try{Object.defineProperty(this,"defaultPrevented",{get:function(){return!0}})}catch(e){this.defaultPrevented=!0}},n};r.prototype=window.Event.prototype,window.CustomEvent=r}},function(e,t,n){window.fetch||(window.fetch=n(2).default||n(2))},function(e,t,n){"use strict";(function(t){function n(){}function r(e,t){return function(){e.apply(t,arguments)}}function i(e){if(!(this instanceof i))throw new TypeError("Promises must be constructed via new");if("function"!=typeof e)throw new TypeError("not a function");this._state=0,this._handled=!1,this._value=void 0,this._deferreds=[],l(e,this)}function o(e,t){for(;3===e._state;)e=e._value;if(0===e._state)return void e._deferreds.push(t);e._handled=!0,i._immediateFn(function(){var n=1===e._state?t.onFulfilled:t.onRejected;if(null===n)return void(1===e._state?a:s)(t.promise,e._value);var r;try{r=n(e._value)}catch(e){return void s(t.promise,e)}a(t.promise,r)})}function a(e,t){try{if(t===e)throw new TypeError("A promise cannot be resolved with itself.");if(t&&("object"==typeof t||"function"==typeof t)){var n=t.then;if(t instanceof i)return e._state=3,e._value=t,void c(e);if("function"==typeof n)return void l(r(n,t),e)}e._state=1,e._value=t,c(e)}catch(t){s(e,t)}}function s(e,t){e._state=2,e._value=t,c(e)}function c(e){2===e._state&&0===e._deferreds.length&&i._immediateFn(function(){e._handled||i._unhandledRejectionFn(e._value)});for(var t=0,n=e._deferreds.length;t=0&&(e._idleTimeoutId=setTimeout(function(){e._onTimeout&&e._onTimeout()},t))},n(16),t.setImmediate="undefined"!=typeof self&&self.setImmediate||void 0!==e&&e.setImmediate||this&&this.setImmediate,t.clearImmediate="undefined"!=typeof self&&self.clearImmediate||void 0!==e&&e.clearImmediate||this&&this.clearImmediate}).call(t,n(1))},function(e,t,n){(function(e,t){!function(e,n){"use strict";function r(e){"function"!=typeof e&&(e=new Function(""+e));for(var t=new Array(arguments.length-1),n=0;n1)for(var n=1;n0&&void 0!==arguments[0]?arguments[0]:{};this.action="function"==typeof e.action?e.action:this.defaultAction,this.target="function"==typeof e.target?e.target:this.defaultTarget,this.text="function"==typeof e.text?e.text:this.defaultText,this.container="object"===f(e.container)?e.container:document.body}},{key:"listenClick",value:function(e){var t=this;this.listener=(0,d.default)(e,"click",function(e){return t.onClick(e)})}},{key:"onClick",value:function(e){var t=e.delegateTarget||e.currentTarget;this.clipboardAction&&(this.clipboardAction=null),this.clipboardAction=new 
u.default({action:this.action(t),target:this.target(t),text:this.text(t),container:this.container,trigger:t,emitter:this})}},{key:"defaultAction",value:function(e){return c("action",e)}},{key:"defaultTarget",value:function(e){var t=c("target",e);if(t)return document.querySelector(t)}},{key:"defaultText",value:function(e){return c("text",e)}},{key:"destroy",value:function(){this.listener.destroy(),this.clipboardAction&&(this.clipboardAction.destroy(),this.clipboardAction=null)}}],[{key:"isSupported",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:["copy","cut"],t="string"==typeof e?[e]:e,n=!!document.queryCommandSupported;return t.forEach(function(e){n=n&&!!document.queryCommandSupported(e)}),n}}]),t}(l.default);e.exports=p})},function(e,t,n){var r,i,o;!function(a,s){i=[e,n(20)],r=s,void 0!==(o="function"==typeof r?r.apply(t,i):r)&&(e.exports=o)}(0,function(e,t){"use strict";function n(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}var r=function(e){return e&&e.__esModule?e:{default:e}}(t),i="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},o=function(){function e(e,t){for(var n=0;n0&&void 0!==arguments[0]?arguments[0]:{};this.action=e.action,this.container=e.container,this.emitter=e.emitter,this.target=e.target,this.text=e.text,this.trigger=e.trigger,this.selectedText=""}},{key:"initSelection",value:function(){this.text?this.selectFake():this.target&&this.selectTarget()}},{key:"selectFake",value:function(){var e=this,t="rtl"==document.documentElement.getAttribute("dir");this.removeFake(),this.fakeHandlerCallback=function(){return e.removeFake()},this.fakeHandler=this.container.addEventListener("click",this.fakeHandlerCallback)||!0,this.fakeElem=document.createElement("textarea"),this.fakeElem.style.fontSize="12pt",this.fakeElem.style.border="0",this.fakeElem.style.padding="0",this.fakeElem.style.margin="0",this.fakeElem.style.position="absolute",this.fakeElem.style[t?"right":"left"]="-9999px";var n=window.pageYOffset||document.documentElement.scrollTop;this.fakeElem.style.top=n+"px",this.fakeElem.setAttribute("readonly",""),this.fakeElem.value=this.text,this.container.appendChild(this.fakeElem),this.selectedText=(0,r.default)(this.fakeElem),this.copyText()}},{key:"removeFake",value:function(){this.fakeHandler&&(this.container.removeEventListener("click",this.fakeHandlerCallback),this.fakeHandler=null,this.fakeHandlerCallback=null),this.fakeElem&&(this.container.removeChild(this.fakeElem),this.fakeElem=null)}},{key:"selectTarget",value:function(){this.selectedText=(0,r.default)(this.target),this.copyText()}},{key:"copyText",value:function(){var e=void 0;try{e=document.execCommand(this.action)}catch(t){e=!1}this.handleResult(e)}},{key:"handleResult",value:function(e){this.emitter.emit(e?"success":"error",{action:this.action,text:this.selectedText,trigger:this.trigger,clearSelection:this.clearSelection.bind(this)})}},{key:"clearSelection",value:function(){this.trigger&&this.trigger.focus(),window.getSelection().removeAllRanges()}},{key:"destroy",value:function(){this.removeFake()}},{key:"action",set:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"copy";if(this._action=e,"copy"!==this._action&&"cut"!==this._action)throw new Error('Invalid "action" value, use either "copy" or "cut"')},get:function(){return 
this._action}},{key:"target",set:function(e){if(void 0!==e){if(!e||"object"!==(void 0===e?"undefined":i(e))||1!==e.nodeType)throw new Error('Invalid "target" value, use a valid Element');if("copy"===this.action&&e.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. Please use "readonly" instead of "disabled" attribute');if("cut"===this.action&&(e.hasAttribute("readonly")||e.hasAttribute("disabled")))throw new Error('Invalid "target" attribute. You can\'t cut text from elements with "readonly" or "disabled" attributes');this._target=e}},get:function(){return this._target}}]),e}();e.exports=a})},function(e,t){function n(e){var t;if("SELECT"===e.nodeName)e.focus(),t=e.value;else if("INPUT"===e.nodeName||"TEXTAREA"===e.nodeName){var n=e.hasAttribute("readonly");n||e.setAttribute("readonly",""),e.select(),e.setSelectionRange(0,e.value.length),n||e.removeAttribute("readonly"),t=e.value}else{e.hasAttribute("contenteditable")&&e.focus();var r=window.getSelection(),i=document.createRange();i.selectNodeContents(e),r.removeAllRanges(),r.addRange(i),t=r.toString()}return t}e.exports=n},function(e,t){function n(){}n.prototype={on:function(e,t,n){var r=this.e||(this.e={});return(r[e]||(r[e]=[])).push({fn:t,ctx:n}),this},once:function(e,t,n){function r(){i.off(e,r),t.apply(n,arguments)}var i=this;return r._=t,this.on(e,r,n)},emit:function(e){var t=[].slice.call(arguments,1),n=((this.e||(this.e={}))[e]||[]).slice(),r=0,i=n.length;for(r;r=0,a=navigator.userAgent.indexOf("Android")>0&&!o,s=/iP(ad|hone|od)/.test(navigator.userAgent)&&!o,c=s&&/OS 4_\d(_\d)?/.test(navigator.userAgent),u=s&&/OS [6-7]_\d/.test(navigator.userAgent),l=navigator.userAgent.indexOf("BB10")>0;i.prototype.needsClick=function(e){switch(e.nodeName.toLowerCase()){case"button":case"select":case"textarea":if(e.disabled)return!0;break;case"input":if(s&&"file"===e.type||e.disabled)return!0;break;case"label":case"iframe":case"video":return!0}return/\bneedsclick\b/.test(e.className)},i.prototype.needsFocus=function(e){switch(e.nodeName.toLowerCase()){case"textarea":return!0;case"select":return!a;case"input":switch(e.type){case"button":case"checkbox":case"file":case"image":case"radio":case"submit":return!1}return!e.disabled&&!e.readOnly;default:return/\bneedsfocus\b/.test(e.className)}},i.prototype.sendClick=function(e,t){var n,r;document.activeElement&&document.activeElement!==e&&document.activeElement.blur(),r=t.changedTouches[0],n=document.createEvent("MouseEvents"),n.initMouseEvent(this.determineEventType(e),!0,!0,window,1,r.screenX,r.screenY,r.clientX,r.clientY,!1,!1,!1,!1,0,null),n.forwardedTouchEvent=!0,e.dispatchEvent(n)},i.prototype.determineEventType=function(e){return a&&"select"===e.tagName.toLowerCase()?"mousedown":"click"},i.prototype.focus=function(e){var t;s&&e.setSelectionRange&&0!==e.type.indexOf("date")&&"time"!==e.type&&"month"!==e.type?(t=e.value.length,e.setSelectionRange(t,t)):e.focus()},i.prototype.updateScrollParent=function(e){var t,n;if(!(t=e.fastClickScrollParent)||!t.contains(e)){n=e;do{if(n.scrollHeight>n.offsetHeight){t=n,e.fastClickScrollParent=n;break}n=n.parentElement}while(n)}t&&(t.fastClickLastScrollTop=t.scrollTop)},i.prototype.getTargetElementFromEventTarget=function(e){return e.nodeType===Node.TEXT_NODE?e.parentNode:e},i.prototype.onTouchStart=function(e){var 
t,n,r;if(e.targetTouches.length>1)return!0;if(t=this.getTargetElementFromEventTarget(e.target),n=e.targetTouches[0],s){if(r=window.getSelection(),r.rangeCount&&!r.isCollapsed)return!0;if(!c){if(n.identifier&&n.identifier===this.lastTouchIdentifier)return e.preventDefault(),!1;this.lastTouchIdentifier=n.identifier,this.updateScrollParent(t)}}return this.trackingClick=!0,this.trackingClickStart=e.timeStamp,this.targetElement=t,this.touchStartX=n.pageX,this.touchStartY=n.pageY,e.timeStamp-this.lastClickTimen||Math.abs(t.pageY-this.touchStartY)>n},i.prototype.onTouchMove=function(e){return!this.trackingClick||((this.targetElement!==this.getTargetElementFromEventTarget(e.target)||this.touchHasMoved(e))&&(this.trackingClick=!1,this.targetElement=null),!0)},i.prototype.findControl=function(e){return void 0!==e.control?e.control:e.htmlFor?document.getElementById(e.htmlFor):e.querySelector("button, input:not([type=hidden]), keygen, meter, output, progress, select, textarea")},i.prototype.onTouchEnd=function(e){var t,n,r,i,o,l=this.targetElement;if(!this.trackingClick)return!0;if(e.timeStamp-this.lastClickTimethis.tapTimeout)return!0;if(this.cancelNextClick=!1,this.lastClickTime=e.timeStamp,n=this.trackingClickStart,this.trackingClick=!1,this.trackingClickStart=0,u&&(o=e.changedTouches[0],l=document.elementFromPoint(o.pageX-window.pageXOffset,o.pageY-window.pageYOffset)||l,l.fastClickScrollParent=this.targetElement.fastClickScrollParent),"label"===(r=l.tagName.toLowerCase())){if(t=this.findControl(l)){if(this.focus(l),a)return!1;l=t}}else if(this.needsFocus(l))return e.timeStamp-n>100||s&&window.top!==window&&"input"===r?(this.targetElement=null,!1):(this.focus(l),this.sendClick(l,e),s&&"select"===r||(this.targetElement=null,e.preventDefault()),!1);return!(!s||c||!(i=l.fastClickScrollParent)||i.fastClickLastScrollTop===i.scrollTop)||(this.needsClick(l)||(e.preventDefault(),this.sendClick(l,e)),!1)},i.prototype.onTouchCancel=function(){this.trackingClick=!1,this.targetElement=null},i.prototype.onMouse=function(e){return!this.targetElement||(!!e.forwardedTouchEvent||(!e.cancelable||(!(!this.needsClick(this.targetElement)||this.cancelNextClick)||(e.stopImmediatePropagation?e.stopImmediatePropagation():e.propagationStopped=!0,e.stopPropagation(),e.preventDefault(),!1))))},i.prototype.onClick=function(e){var t;return this.trackingClick?(this.targetElement=null,this.trackingClick=!1,!0):"submit"===e.target.type&&0===e.detail||(t=this.onMouse(e),t||(this.targetElement=null),t)},i.prototype.destroy=function(){var e=this.layer;a&&(e.removeEventListener("mouseover",this.onMouse,!0),e.removeEventListener("mousedown",this.onMouse,!0),e.removeEventListener("mouseup",this.onMouse,!0)),e.removeEventListener("click",this.onClick,!0),e.removeEventListener("touchstart",this.onTouchStart,!1),e.removeEventListener("touchmove",this.onTouchMove,!1),e.removeEventListener("touchend",this.onTouchEnd,!1),e.removeEventListener("touchcancel",this.onTouchCancel,!1)},i.notNeeded=function(e){var t,n,r;if(void 
0===window.ontouchstart)return!0;if(n=+(/Chrome\/([0-9]+)/.exec(navigator.userAgent)||[,0])[1]){if(!a)return!0;if(t=document.querySelector("meta[name=viewport]")){if(-1!==t.content.indexOf("user-scalable=no"))return!0;if(n>31&&document.documentElement.scrollWidth<=window.outerWidth)return!0}}if(l&&(r=navigator.userAgent.match(/Version\/([0-9]*)\.([0-9]*)/),r[1]>=10&&r[2]>=3&&(t=document.querySelector("meta[name=viewport]")))){if(-1!==t.content.indexOf("user-scalable=no"))return!0;if(document.documentElement.scrollWidth<=window.outerWidth)return!0}return"none"===e.style.msTouchAction||"manipulation"===e.style.touchAction||(!!(+(/Firefox\/([0-9]+)/.exec(navigator.userAgent)||[,0])[1]>=27&&(t=document.querySelector("meta[name=viewport]"))&&(-1!==t.content.indexOf("user-scalable=no")||document.documentElement.scrollWidth<=window.outerWidth))||("none"===e.style.touchAction||"manipulation"===e.style.touchAction))},i.attach=function(e,t){return new i(e,t)},void 0!==(r=function(){return i}.call(t,n,t,e))&&(e.exports=r)}()},function(e,t,n){"use strict";function r(e){return e&&e.__esModule?e:{default:e}}t.__esModule=!0;var i=n(28),o=r(i),a=n(30),s=r(a),c=n(33),u=r(c),l=n(37),d=r(l),f=n(43),h=r(f),p=n(45),m=r(p),v=n(51),y=r(v);t.default={Event:o.default,Header:s.default,Nav:u.default,Search:d.default,Sidebar:h.default,Source:m.default,Tabs:y.default}},function(e,t,n){"use strict";function r(e){return e&&e.__esModule?e:{default:e}}t.__esModule=!0;var i=n(3),o=r(i),a=n(29),s=r(a);t.default={Listener:o.default,MatchMedia:s.default}},function(e,t,n){"use strict";function r(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}t.__esModule=!0;var i=n(3),o=(function(e){e&&e.__esModule}(i),function e(t,n){r(this,e),this.handler_=function(e){e.matches?n.listen():n.unlisten()};var i=window.matchMedia(t);i.addListener(this.handler_),this.handler_(i)});t.default=o},function(e,t,n){"use strict";function r(e){return e&&e.__esModule?e:{default:e}}t.__esModule=!0;var i=n(31),o=r(i),a=n(32),s=r(a);t.default={Shadow:o.default,Title:s.default}},function(e,t,n){"use strict";function r(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}t.__esModule=!0;var i=function(){function e(t,n){r(this,e);var i="string"==typeof t?document.querySelector(t):t;if(!(i instanceof HTMLElement&&i.parentNode instanceof HTMLElement))throw new ReferenceError;if(this.el_=i.parentNode,!((i="string"==typeof n?document.querySelector(n):n)instanceof HTMLElement))throw new ReferenceError;this.header_=i,this.height_=0,this.active_=!1}return e.prototype.setup=function(){for(var e=this.el_;e=e.previousElementSibling;){if(!(e instanceof HTMLElement))throw new ReferenceError;this.height_+=e.offsetHeight}this.update()},e.prototype.update=function(e){if(!e||"resize"!==e.type&&"orientationchange"!==e.type){var t=window.pageYOffset>=this.height_;t!==this.active_&&(this.header_.dataset.mdState=(this.active_=t)?"shadow":"")}else this.height_=0,this.setup()},e.prototype.reset=function(){this.header_.dataset.mdState="",this.height_=0,this.active_=!1},e}();t.default=i},function(e,t,n){"use strict";function r(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}t.__esModule=!0;var i=function(){function e(t,n){r(this,e);var i="string"==typeof t?document.querySelector(t):t;if(!(i instanceof HTMLElement))throw new ReferenceError;if(this.el_=i,!((i="string"==typeof n?document.querySelector(n):n)instanceof HTMLHeadingElement))throw new 
ReferenceError;this.header_=i,this.active_=!1}return e.prototype.setup=function(){var e=this;Array.prototype.forEach.call(this.el_.children,function(t){t.style.width=e.el_.offsetWidth-20+"px"})},e.prototype.update=function(e){var t=this,n=window.pageYOffset>=this.header_.offsetTop;n!==this.active_&&(this.el_.dataset.mdState=(this.active_=n)?"active":""),"resize"!==e.type&&"orientationchange"!==e.type||Array.prototype.forEach.call(this.el_.children,function(e){e.style.width=t.el_.offsetWidth-20+"px"})},e.prototype.reset=function(){this.el_.dataset.mdState="",this.el_.style.width="",this.active_=!1},e}();t.default=i},function(e,t,n){"use strict";function r(e){return e&&e.__esModule?e:{default:e}}t.__esModule=!0;var i=n(34),o=r(i),a=n(35),s=r(a),c=n(36),u=r(c);t.default={Blur:o.default,Collapse:s.default,Scrolling:u.default}},function(e,t,n){"use strict";function r(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}t.__esModule=!0;var i=function(){function e(t){r(this,e),this.els_="string"==typeof t?document.querySelectorAll(t):t,this.index_=0,this.offset_=window.pageYOffset,this.dir_=!1,this.anchors_=[].reduce.call(this.els_,function(e,t){return e.concat(document.getElementById(t.hash.substring(1))||[])},[])}return e.prototype.setup=function(){this.update()},e.prototype.update=function(){var e=window.pageYOffset,t=this.offset_-e<0;if(this.dir_!==t&&(this.index_=this.index_=t?0:this.els_.length-1),0!==this.anchors_.length){if(this.offset_<=e)for(var n=this.index_+1;n0&&(this.els_[n-1].dataset.mdState="blur"),this.index_=n;else for(var r=this.index_;r>=0;r--){if(!(this.anchors_[r].offsetTop-80>e)){this.index_=r;break}r>0&&(this.els_[r-1].dataset.mdState="")}this.offset_=e,this.dir_=t}},e.prototype.reset=function(){Array.prototype.forEach.call(this.els_,function(e){e.dataset.mdState=""}),this.index_=0,this.offset_=window.pageYOffset},e}();t.default=i},function(e,t,n){"use strict";function r(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}t.__esModule=!0;var i=function(){function e(t){r(this,e);var n="string"==typeof t?document.querySelector(t):t;if(!(n instanceof HTMLElement))throw new ReferenceError;this.el_=n}return e.prototype.setup=function(){var e=this.el_.getBoundingClientRect().height;this.el_.style.display=e?"block":"none",this.el_.style.overflow=e?"visible":"hidden"},e.prototype.update=function(){var e=this,t=this.el_.getBoundingClientRect().height;if(this.el_.style.display="block",this.el_.style.overflow="",t)this.el_.style.maxHeight=t+"px",requestAnimationFrame(function(){e.el_.setAttribute("data-md-state","animate"),e.el_.style.maxHeight="0px"});else{this.el_.setAttribute("data-md-state","expand"),this.el_.style.maxHeight="";var n=this.el_.getBoundingClientRect().height;this.el_.removeAttribute("data-md-state"),this.el_.style.maxHeight="0px",requestAnimationFrame(function(){e.el_.setAttribute("data-md-state","animate"),e.el_.style.maxHeight=n+"px"})}var r=function e(n){var r=n.target;if(!(r instanceof HTMLElement))throw new ReferenceError;r.removeAttribute("data-md-state"),r.style.maxHeight="",r.style.display=t?"none":"block",r.style.overflow=t?"hidden":"visible",r.removeEventListener("transitionend",e)};this.el_.addEventListener("transitionend",r,!1)},e.prototype.reset=function(){this.el_.dataset.mdState="",this.el_.style.maxHeight="",this.el_.style.display="",this.el_.style.overflow=""},e}();t.default=i},function(e,t,n){"use strict";function r(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a 
function")}t.__esModule=!0;var i=function(){function e(t){r(this,e);var n="string"==typeof t?document.querySelector(t):t;if(!(n instanceof HTMLElement))throw new ReferenceError;this.el_=n}return e.prototype.setup=function(){this.el_.children[this.el_.children.length-1].style.webkitOverflowScrolling="touch";var e=this.el_.querySelectorAll("[data-md-toggle]");Array.prototype.forEach.call(e,function(e){if(!(e instanceof HTMLInputElement))throw new ReferenceError;if(e.checked){var t=e.nextElementSibling;if(!(t instanceof HTMLElement))throw new ReferenceError;for(;"NAV"!==t.tagName&&t.nextElementSibling;)t=t.nextElementSibling;if(!(e.parentNode instanceof HTMLElement&&e.parentNode.parentNode instanceof HTMLElement))throw new ReferenceError;var n=e.parentNode.parentNode,r=t.children[t.children.length-1];n.style.webkitOverflowScrolling="",r.style.webkitOverflowScrolling="touch"}})},e.prototype.update=function(e){var t=e.target;if(!(t instanceof HTMLElement))throw new ReferenceError;var n=t.nextElementSibling;if(!(n instanceof HTMLElement))throw new ReferenceError;for(;"NAV"!==n.tagName&&n.nextElementSibling;)n=n.nextElementSibling;if(!(t.parentNode instanceof HTMLElement&&t.parentNode.parentNode instanceof HTMLElement))throw new ReferenceError;var r=t.parentNode.parentNode,i=n.children[n.children.length-1];if(r.style.webkitOverflowScrolling="",i.style.webkitOverflowScrolling="",!t.checked){var o=function e(){n instanceof HTMLElement&&(r.style.webkitOverflowScrolling="touch",n.removeEventListener("transitionend",e))};n.addEventListener("transitionend",o,!1)}if(t.checked){var a=function e(){n instanceof HTMLElement&&(i.style.webkitOverflowScrolling="touch",n.removeEventListener("transitionend",e))};n.addEventListener("transitionend",a,!1)}},e.prototype.reset=function(){this.el_.children[1].style.webkitOverflowScrolling="";var e=this.el_.querySelectorAll("[data-md-toggle]");Array.prototype.forEach.call(e,function(e){if(!(e instanceof HTMLInputElement))throw new ReferenceError;if(e.checked){var t=e.nextElementSibling;if(!(t instanceof HTMLElement))throw new ReferenceError;for(;"NAV"!==t.tagName&&t.nextElementSibling;)t=t.nextElementSibling;if(!(e.parentNode instanceof HTMLElement&&e.parentNode.parentNode instanceof HTMLElement))throw new ReferenceError;var n=e.parentNode.parentNode,r=t.children[t.children.length-1];n.style.webkitOverflowScrolling="",r.style.webkitOverflowScrolling=""}})},e}();t.default=i},function(e,t,n){"use strict";function r(e){return e&&e.__esModule?e:{default:e}}t.__esModule=!0;var i=n(38),o=r(i),a=n(39),s=r(a);t.default={Lock:o.default,Result:s.default}},function(e,t,n){"use strict";function r(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}t.__esModule=!0;var i=function(){function e(t){r(this,e);var n="string"==typeof t?document.querySelector(t):t;if(!(n instanceof HTMLInputElement))throw new ReferenceError;if(this.el_=n,!document.body)throw new ReferenceError;this.lock_=document.body}return e.prototype.setup=function(){this.update()},e.prototype.update=function(){var e=this;this.el_.checked?(this.offset_=window.pageYOffset,setTimeout(function(){window.scrollTo(0,0),e.el_.checked&&(e.lock_.dataset.mdState="lock")},400)):(this.lock_.dataset.mdState="",setTimeout(function(){void 0!==e.offset_&&window.scrollTo(0,e.offset_)},100))},e.prototype.reset=function(){"lock"===this.lock_.dataset.mdState&&window.scrollTo(0,this.offset_),this.lock_.dataset.mdState=""},e}();t.default=i},function(e,t,n){"use strict";(function(e){function r(e){return 
e&&e.__esModule?e:{default:e}}function i(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}t.__esModule=!0;var o=n(40),a=r(o),s=n(41),c=r(s),u=function(e,t){var n=t;if(e.length>n){for(;" "!==e[n]&&--n>0;);return e.substring(0,n)+"..."}return e},l=function(e){var t=document.getElementsByName("lang:"+e)[0];if(!(t instanceof HTMLMetaElement))throw new ReferenceError;return t.content},d=function(){function t(e,n){i(this,t);var r="string"==typeof e?document.querySelector(e):e;if(!(r instanceof HTMLElement))throw new ReferenceError;this.el_=r;var o=Array.prototype.slice.call(this.el_.children),a=o[0],s=o[1];this.data_=n,this.meta_=a,this.list_=s,this.message_={placeholder:this.meta_.textContent,none:l("search.result.none"),one:l("search.result.one"),other:l("search.result.other")};var u=l("search.tokenizer");u.length&&(c.default.tokenizer.separator=u),this.lang_=l("search.language").split(",").filter(Boolean).map(function(e){return e.trim()})}return t.prototype.update=function(t){var n=this;if("focus"!==t.type||this.index_){if("focus"===t.type||"keyup"===t.type){var r=t.target;if(!(r instanceof HTMLInputElement))throw new ReferenceError;if(!this.index_||r.value===this.value_)return;for(;this.list_.firstChild;)this.list_.removeChild(this.list_.firstChild);if(this.value_=r.value,0===this.value_.length)return void(this.meta_.textContent=this.message_.placeholder);var i=this.index_.query(function(e){n.value_.toLowerCase().split(" ").filter(Boolean).forEach(function(t){e.term(t,{wildcard:c.default.Query.wildcard.TRAILING})})}).reduce(function(e,t){var r=n.docs_.get(t.ref);if(r.parent){var i=r.parent.location;e.set(i,(e.get(i)||[]).concat(t))}else{var o=r.location;e.set(o,e.get(o)||[])}return e},new Map),o=(0,a.default)(this.value_.trim()).replace(new RegExp(c.default.tokenizer.separator,"img"),"|"),s=new RegExp("(^|"+c.default.tokenizer.separator+")("+o+")","img"),d=function(e,t,n){return t+""+n+""};this.stack_=[],i.forEach(function(t,r){var i,o=n.docs_.get(r),a=e.createElement("li",{class:"md-search-result__item"},e.createElement("a",{href:o.location,title:o.title,class:"md-search-result__link",tabindex:"-1"},e.createElement("article",{class:"md-search-result__article md-search-result__article--document"},e.createElement("h1",{class:"md-search-result__title"},{__html:o.title.replace(s,d)}),o.text.length?e.createElement("p",{class:"md-search-result__teaser"},{__html:o.text.replace(s,d)}):{}))),c=t.map(function(t){return function(){var r=n.docs_.get(t.ref);a.appendChild(e.createElement("a",{href:r.location,title:r.title,class:"md-search-result__link","data-md-rel":"anchor",tabindex:"-1"},e.createElement("article",{class:"md-search-result__article"},e.createElement("h1",{class:"md-search-result__title"},{__html:r.title.replace(s,d)}),r.text.length?e.createElement("p",{class:"md-search-result__teaser"},{__html:u(r.text.replace(s,d),400)}):{})))}});(i=n.stack_).push.apply(i,[function(){return n.list_.appendChild(a)}].concat(c))});var f=this.el_.parentNode;if(!(f instanceof HTMLElement))throw new ReferenceError;for(;this.stack_.length&&f.offsetHeight>=f.scrollHeight-16;)this.stack_.shift()();var h=this.list_.querySelectorAll("[data-md-rel=anchor]");switch(Array.prototype.forEach.call(h,function(e){["click","keydown"].forEach(function(t){e.addEventListener(t,function(n){if("keydown"!==t||13===n.keyCode){var r=document.querySelector("[data-md-toggle=search]");if(!(r instanceof HTMLInputElement))throw new ReferenceError;r.checked&&(r.checked=!1,r.dispatchEvent(new 
CustomEvent("change"))),n.preventDefault(),setTimeout(function(){document.location.href=e.href},100)}})})}),i.size){case 0:this.meta_.textContent=this.message_.none;break;case 1:this.meta_.textContent=this.message_.one;break;default:this.meta_.textContent=this.message_.other.replace("#",i.size)}}}else{var p=function(e){n.docs_=e.reduce(function(e,t){var n=t.location.split("#"),r=n[0];return n[1]&&(t.parent=e.get(r),t.parent&&!t.parent.done&&(t.parent.title=t.title,t.parent.text=t.text,t.parent.done=!0)),t.text=t.text.replace(/\n/g," ").replace(/\s+/g," ").replace(/\s+([,.:;!?])/g,function(e,t){return t}),t.parent&&t.parent.title===t.title||e.set(t.location,t),e},new Map);var t=n.docs_,r=n.lang_;n.stack_=[],n.index_=(0,c.default)(function(){var e,n=this,i={"search.pipeline.trimmer":c.default.trimmer,"search.pipeline.stopwords":c.default.stopWordFilter},o=Object.keys(i).reduce(function(e,t){return l(t).match(/^false$/i)||e.push(i[t]),e},[]);this.pipeline.reset(),o&&(e=this.pipeline).add.apply(e,o),1===r.length&&"en"!==r[0]&&c.default[r[0]]?this.use(c.default[r[0]]):r.length>1&&this.use(c.default.multiLanguage.apply(c.default,r)),this.field("title",{boost:10}),this.field("text"),this.ref("location"),t.forEach(function(e){return n.add(e)})});var i=n.el_.parentNode;if(!(i instanceof HTMLElement))throw new ReferenceError;i.addEventListener("scroll",function(){for(;n.stack_.length&&i.scrollTop+i.offsetHeight>=i.scrollHeight-16;)n.stack_.splice(0,10).forEach(function(e){return e()})})};setTimeout(function(){return"function"==typeof n.data_?n.data_().then(p):p(n.data_)},250)}},t}();t.default=d}).call(t,n(0))},function(e,t,n){"use strict";var r=/[|\\{}()[\]^$+*?.]/g;e.exports=function(e){if("string"!=typeof e)throw new TypeError("Expected a string");return e.replace(r,"\\$&")}},function(e,t,n){(function(t){e.exports=t.lunr=n(42)}).call(t,n(1))},function(e,t,n){var r,i;!function(){var o=function(e){var t=new o.Builder;return t.pipeline.add(o.trimmer,o.stopWordFilter,o.stemmer),t.searchPipeline.add(o.stemmer),e.call(t,t),t.build()};o.version="2.1.5",o.utils={},o.utils.warn=function(e){return function(t){e.console&&console.warn&&console.warn(t)}}(this),o.utils.asString=function(e){return void 0===e||null===e?"":e.toString()},o.FieldRef=function(e,t,n){this.docRef=e,this.fieldName=t,this._stringValue=n},o.FieldRef.joiner="/",o.FieldRef.fromString=function(e){var t=e.indexOf(o.FieldRef.joiner);if(-1===t)throw"malformed field ref string";var n=e.slice(0,t),r=e.slice(t+1);return new o.FieldRef(r,n,e)},o.FieldRef.prototype.toString=function(){return void 0==this._stringValue&&(this._stringValue=this.fieldName+o.FieldRef.joiner+this.docRef),this._stringValue},o.idf=function(e,t){var n=0;for(var r in e)"_index"!=r&&(n+=Object.keys(e[r]).length);var i=(t-n+.5)/(n+.5);return Math.log(1+Math.abs(i))},o.Token=function(e,t){this.str=e||"",this.metadata=t||{}},o.Token.prototype.toString=function(){return this.str},o.Token.prototype.update=function(e){return this.str=e(this.str,this.metadata),this},o.Token.prototype.clone=function(e){return e=e||function(e){return e},new o.Token(e(this.str,this.metadata),this.metadata)},o.tokenizer=function(e){if(null==e||void 0==e)return[];if(Array.isArray(e))return e.map(function(e){return new o.Token(o.utils.asString(e).toLowerCase())});for(var t=e.toString().trim().toLowerCase(),n=t.length,r=[],i=0,a=0;i<=n;i++){var s=t.charAt(i),c=i-a;(s.match(o.tokenizer.separator)||i==n)&&(c>0&&r.push(new o.Token(t.slice(a,i),{position:[a,c],index:r.length})),a=i+1)}return 
[… remainder of the minified documentation-theme search/application bundle: the lunr.js core (tokenizer, Pipeline, Vector, TokenSet, Index, query lexer and parser) plus theme UI components (sidebar positioning, GitHub repository source adapter, cookie-backed cache); the single-line minified source is not reproduced here]
\ No newline at end of file
diff --git a/docs/assets/javascripts/lunr/lunr.da.js b/docs/assets/javascripts/lunr/lunr.da.js
new file mode 100644
index 000000000..3b07b2c19
--- /dev/null
+++ b/docs/assets/javascripts/lunr/lunr.da.js
@@ -0,0 +1 @@
+[one-line minified bundle: lunr.js Danish language support (trimmer-da, stemmer-da, stopWordFilter-da)]
\ No newline at end of file
diff --git a/docs/assets/javascripts/lunr/lunr.de.js b/docs/assets/javascripts/lunr/lunr.de.js
new file mode 100644
index 000000000..ebd78f281
--- /dev/null
+++ b/docs/assets/javascripts/lunr/lunr.de.js
@@ -0,0 +1 @@
+[one-line minified bundle: lunr.js German language support (trimmer-de, stemmer-de, stopWordFilter-de)]
\ No newline at end of file
diff --git a/docs/assets/javascripts/lunr/lunr.du.js b/docs/assets/javascripts/lunr/lunr.du.js
new file mode 100644
index 000000000..375c0e763
--- /dev/null
+++ b/docs/assets/javascripts/lunr/lunr.du.js
@@ -0,0 +1 @@
+[one-line minified bundle: lunr.js Dutch language support (trimmer-du, stemmer-du, stopWordFilter-du)]
\ No newline at end of file
diff --git a/docs/assets/javascripts/lunr/lunr.es.js b/docs/assets/javascripts/lunr/lunr.es.js
new file mode 100644
index 000000000..4cb634f0a
--- /dev/null
+++ b/docs/assets/javascripts/lunr/lunr.es.js
@@ -0,0 +1 @@
+[one-line minified bundle: lunr.js Spanish language support (trimmer-es, stemmer-es, stopWordFilter-es)]
\ No newline at end of file
diff --git a/docs/assets/javascripts/lunr/lunr.fi.js b/docs/assets/javascripts/lunr/lunr.fi.js
new file mode 100644
index 000000000..0200b1fcb
--- /dev/null
+++ b/docs/assets/javascripts/lunr/lunr.fi.js
@@ -0,0 +1 @@
+[one-line minified bundle: lunr.js Finnish language support (trimmer-fi, stemmer-fi, stopWordFilter-fi)]
\ No newline at end of file
diff --git a/docs/assets/javascripts/lunr/lunr.fr.js b/docs/assets/javascripts/lunr/lunr.fr.js
new file mode 100644
index 000000000..3a9b9b177
--- /dev/null
+++ b/docs/assets/javascripts/lunr/lunr.fr.js
@@ -0,0 +1 @@
+[one-line minified bundle: lunr.js French language support (trimmer-fr, stemmer-fr, stopWordFilter-fr)]
\ No newline at end of file
diff --git a/docs/assets/javascripts/lunr/lunr.hu.js b/docs/assets/javascripts/lunr/lunr.hu.js
new file mode 100644
index 000000000..fa704a69c
--- /dev/null
+++ b/docs/assets/javascripts/lunr/lunr.hu.js
@@ -0,0 +1 @@
+[one-line minified bundle: lunr.js Hungarian language support (trimmer-hu, stemmer-hu, stopWordFilter-hu)]
\ No newline at end of file
diff --git a/docs/assets/javascripts/lunr/lunr.it.js b/docs/assets/javascripts/lunr/lunr.it.js
new file mode 100644
index 000000000..293073389
--- /dev/null
+++ b/docs/assets/javascripts/lunr/lunr.it.js
@@ -0,0 +1 @@
+!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var r,i,n;e.it=function(){this.pipeline.reset(),this.pipeline.add(e.it.trimmer,e.it.stopWordFilter,e.it.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.it.stemmer))},e.it.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.it.trimmer=e.trimmerSupport.generateTrimmer(e.it.wordCharacters),e.Pipeline.registerFunction(e.it.trimmer,"trimmer-it"),e.it.stemmer=(r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){var e,n,o,t=[new r("",-1,7),new r("qu",0,6),new r("á",0,1),new r("é",0,2),new r("í",0,3),new r("ó",0,4),new r("ú",0,5)],s=[new r("",-1,3),new r("I",0,1),new r("U",0,2)],a=[new r("la",-1,-1),new r("cela",0,-1),new r("gliela",0,-1),new r("mela",0,-1),new r("tela",0,-1),new r("vela",0,-1),new r("le",-1,-1),new r("cele",6,-1),new r("gliele",6,-1),new r("mele",6,-1),new r("tele",6,-1),new r("vele",6,-1),new r("ne",-1,-1),new r("cene",12,-1),new r("gliene",12,-1),new r("mene",12,-1),new r("sene",12,-1),new r("tene",12,-1),new r("vene",12,-1),new r("ci",-1,-1),new r("li",-1,-1),new r("celi",20,-1),new r("glieli",20,-1),new r("meli",20,-1),new r("teli",20,-1),new r("veli",20,-1),new r("gli",20,-1),new r("mi",-1,-1),new r("si",-1,-1),new r("ti",-1,-1),new r("vi",-1,-1),new r("lo",-1,-1),new r("celo",31,-1),new r("glielo",31,-1),new r("melo",31,-1),new r("telo",31,-1),new r("velo",31,-1)],u=[new r("ando",-1,1),new r("endo",-1,1),new r("ar",-1,2),new r("er",-1,2),new r("ir",-1,2)],c=[new r("ic",-1,-1),new r("abil",-1,-1),new r("os",-1,-1),new r("iv",-1,1)],w=[new r("ic",-1,1),new r("abil",-1,1),new r("iv",-1,1)],l=[new r("ica",-1,1),new r("logia",-1,3),new r("osa",-1,1),new r("ista",-1,1),new r("iva",-1,9),new r("anza",-1,1),new r("enza",-1,5),new r("ice",-1,1),new r("atrice",7,1),new r("iche",-1,1),new r("logie",-1,3),new r("abile",-1,1),new r("ibile",-1,1),new r("usione",-1,4),new r("azione",-1,2),new r("uzione",-1,4),new r("atore",-1,2),new r("ose",-1,1),new r("ante",-1,1),new r("mente",-1,1),new r("amente",19,7),new r("iste",-1,1),new r("ive",-1,9),new r("anze",-1,1),new r("enze",-1,5),new r("ici",-1,1),new r("atrici",25,1),new r("ichi",-1,1),new r("abili",-1,1),new r("ibili",-1,1),new r("ismi",-1,1),new r("usioni",-1,4),new r("azioni",-1,2),new r("uzioni",-1,4),new r("atori",-1,2),new r("osi",-1,1),new r("anti",-1,1),new r("amenti",-1,6),new r("imenti",-1,6),new r("isti",-1,1),new r("ivi",-1,9),new r("ico",-1,1),new r("ismo",-1,1),new r("oso",-1,1),new r("amento",-1,6),new r("imento",-1,6),new r("ivo",-1,9),new r("ità",-1,8),new r("istà",-1,1),new r("istè",-1,1),new r("istì",-1,1)],m=[new r("isca",-1,1),new r("enda",-1,1),new r("ata",-1,1),new r("ita",-1,1),new r("uta",-1,1),new r("ava",-1,1),new r("eva",-1,1),new r("iva",-1,1),new r("erebbe",-1,1),new r("irebbe",-1,1),new r("isce",-1,1),new r("ende",-1,1),new r("are",-1,1),new r("ere",-1,1),new r("ire",-1,1),new r("asse",-1,1),new r("ate",-1,1),new r("avate",16,1),new r("evate",16,1),new r("ivate",16,1),new r("ete",-1,1),new r("erete",20,1),new r("irete",20,1),new r("ite",-1,1),new r("ereste",-1,1),new r("ireste",-1,1),new r("ute",-1,1),new r("erai",-1,1),new r("irai",-1,1),new r("isci",-1,1),new r("endi",-1,1),new r("erei",-1,1),new r("irei",-1,1),new r("assi",-1,1),new r("ati",-1,1),new r("iti",-1,1),new r("eresti",-1,1),new r("iresti",-1,1),new r("uti",-1,1),new r("avi",-1,1),new r("evi",-1,1),new r("ivi",-1,1),new r("isco",-1,1),new r("ando",-1,1),new 
r("endo",-1,1),new r("Yamo",-1,1),new r("iamo",-1,1),new r("avamo",-1,1),new r("evamo",-1,1),new r("ivamo",-1,1),new r("eremo",-1,1),new r("iremo",-1,1),new r("assimo",-1,1),new r("ammo",-1,1),new r("emmo",-1,1),new r("eremmo",54,1),new r("iremmo",54,1),new r("immo",-1,1),new r("ano",-1,1),new r("iscano",58,1),new r("avano",58,1),new r("evano",58,1),new r("ivano",58,1),new r("eranno",-1,1),new r("iranno",-1,1),new r("ono",-1,1),new r("iscono",65,1),new r("arono",65,1),new r("erono",65,1),new r("irono",65,1),new r("erebbero",-1,1),new r("irebbero",-1,1),new r("assero",-1,1),new r("essero",-1,1),new r("issero",-1,1),new r("ato",-1,1),new r("ito",-1,1),new r("uto",-1,1),new r("avo",-1,1),new r("evo",-1,1),new r("ivo",-1,1),new r("ar",-1,1),new r("ir",-1,1),new r("erà",-1,1),new r("irà",-1,1),new r("erò",-1,1),new r("irò",-1,1)],f=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2,1],v=[17,65,0,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2],b=[17],d=new i;function _(e,r,i){return!(!d.eq_s(1,e)||(d.ket=d.cursor,!d.in_grouping(f,97,249)))&&(d.slice_from(r),d.cursor=i,!0)}function g(e){if(d.cursor=e,!d.in_grouping(f,97,249))return!1;for(;!d.out_grouping(f,97,249);){if(d.cursor>=d.limit)return!1;d.cursor++}return!0}function p(){var e,r=d.cursor;if(!function(){if(d.in_grouping(f,97,249)){var e=d.cursor;if(d.out_grouping(f,97,249)){for(;!d.in_grouping(f,97,249);){if(d.cursor>=d.limit)return g(e);d.cursor++}return!0}return g(e)}return!1}()){if(d.cursor=r,!d.out_grouping(f,97,249))return;if(e=d.cursor,d.out_grouping(f,97,249)){for(;!d.in_grouping(f,97,249);){if(d.cursor>=d.limit)return d.cursor=e,void(d.in_grouping(f,97,249)&&d.cursor=d.limit)return;d.cursor++}o=d.cursor}function k(){for(;!d.in_grouping(f,97,249);){if(d.cursor>=d.limit)return!1;d.cursor++}for(;!d.out_grouping(f,97,249);){if(d.cursor>=d.limit)return!1;d.cursor++}return!0}function h(){return o<=d.cursor}function q(){return e<=d.cursor}function C(){var e;if(d.ket=d.cursor,!(e=d.find_among_b(l,51)))return!1;switch(d.bra=d.cursor,e){case 1:if(!q())return!1;d.slice_del();break;case 2:if(!q())return!1;d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"ic")&&(d.bra=d.cursor,q()&&d.slice_del());break;case 3:if(!q())return!1;d.slice_from("log");break;case 4:if(!q())return!1;d.slice_from("u");break;case 5:if(!q())return!1;d.slice_from("ente");break;case 6:if(!h())return!1;d.slice_del();break;case 7:if(!(n<=d.cursor))return!1;d.slice_del(),d.ket=d.cursor,(e=d.find_among_b(c,4))&&(d.bra=d.cursor,q()&&(d.slice_del(),1==e&&(d.ket=d.cursor,d.eq_s_b(2,"at")&&(d.bra=d.cursor,q()&&d.slice_del()))));break;case 8:if(!q())return!1;d.slice_del(),d.ket=d.cursor,(e=d.find_among_b(w,3))&&(d.bra=d.cursor,1==e&&q()&&d.slice_del());break;case 9:if(!q())return!1;d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"at")&&(d.bra=d.cursor,q()&&(d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"ic")&&(d.bra=d.cursor,q()&&d.slice_del())))}return!0}function z(){var e;e=d.limit-d.cursor,d.ket=d.cursor,d.in_grouping_b(v,97,242)&&(d.bra=d.cursor,h()&&(d.slice_del(),d.ket=d.cursor,d.eq_s_b(1,"i")&&(d.bra=d.cursor,h())))?d.slice_del():d.cursor=d.limit-e,d.ket=d.cursor,d.eq_s_b(1,"h")&&(d.bra=d.cursor,d.in_grouping_b(b,99,103)&&h()&&d.slice_del())}this.setCurrent=function(e){d.setCurrent(e)},this.getCurrent=function(){return d.getCurrent()},this.stem=function(){var r,i,c,w=d.cursor;return function(){for(var e,r,i,n,o=d.cursor;;){if(d.bra=d.cursor,e=d.find_among(t,7))switch(d.ket=d.cursor,e){case 1:d.slice_from("à");continue;case 2:d.slice_from("è");continue;case 3:d.slice_from("ì");continue;case 
4:d.slice_from("ò");continue;case 5:d.slice_from("ù");continue;case 6:d.slice_from("qU");continue;case 7:if(d.cursor>=d.limit)break;d.cursor++;continue}break}for(d.cursor=o;;)for(r=d.cursor;;){if(i=d.cursor,d.in_grouping(f,97,249)){if(d.bra=d.cursor,n=d.cursor,_("u","U",i))break;if(d.cursor=n,_("i","I",i))break}if(d.cursor=i,d.cursor>=d.limit)return void(d.cursor=r);d.cursor++}}(),d.cursor=w,r=d.cursor,o=d.limit,n=o,e=o,p(),d.cursor=r,k()&&(n=d.cursor,k()&&(e=d.cursor)),d.limit_backward=w,d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,d.find_among_b(a,37)&&(d.bra=d.cursor,(e=d.find_among_b(u,5))&&h()))switch(e){case 1:d.slice_del();break;case 2:d.slice_from("e")}}(),d.cursor=d.limit,C()||(d.cursor=d.limit,d.cursor>=o&&(c=d.limit_backward,d.limit_backward=o,d.ket=d.cursor,(i=d.find_among_b(m,87))&&(d.bra=d.cursor,1==i&&d.slice_del()),d.limit_backward=c)),d.cursor=d.limit,z(),d.cursor=d.limit_backward,function(){for(var e;d.bra=d.cursor,e=d.find_among(s,3);)switch(d.ket=d.cursor,e){case 1:d.slice_from("i");break;case 2:d.slice_from("u");break;case 3:if(d.cursor>=d.limit)return;d.cursor++}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}),e.Pipeline.registerFunction(e.it.stemmer,"stemmer-it"),e.it.stopWordFilter=e.generateStopWordFilter("a abbia abbiamo abbiano abbiate ad agl agli ai al all alla alle allo anche avemmo avendo avesse avessero avessi avessimo aveste avesti avete aveva avevamo avevano avevate avevi avevo avrai avranno avrebbe avrebbero avrei avremmo avremo avreste avresti avrete avrà avrò avuta avute avuti avuto c che chi ci coi col come con contro cui da dagl dagli dai dal dall dalla dalle dallo degl degli dei del dell della delle dello di dov dove e ebbe ebbero ebbi ed era erano eravamo eravate eri ero essendo faccia facciamo facciano facciate faccio facemmo facendo facesse facessero facessi facessimo faceste facesti faceva facevamo facevano facevate facevi facevo fai fanno farai faranno farebbe farebbero farei faremmo faremo fareste faresti farete farà farò fece fecero feci fosse fossero fossi fossimo foste fosti fu fui fummo furono gli ha hai hanno ho i il in io l la le lei li lo loro lui ma mi mia mie miei mio ne negl negli nei nel nell nella nelle nello noi non nostra nostre nostri nostro o per perché più quale quanta quante quanti quanto quella quelle quelli quello questa queste questi questo sarai saranno sarebbe sarebbero sarei saremmo saremo sareste saresti sarete sarà sarò se sei si sia siamo siano siate siete sono sta stai stando stanno starai staranno starebbe starebbero starei staremmo staremo stareste staresti starete starà starò stava stavamo stavano stavate stavi stavo stemmo stesse stessero stessi stessimo steste stesti stette stettero stetti stia stiamo stiano stiate sto su sua sue sugl sugli sui sul sull sulla sulle sullo suo suoi ti tra tu tua tue tuo tuoi tutti tutto un una uno vi voi vostra vostre vostri vostro è".split(" ")),e.Pipeline.registerFunction(e.it.stopWordFilter,"stopWordFilter-it")}}); \ No newline at end of file diff --git a/docs/assets/javascripts/lunr/lunr.jp.js b/docs/assets/javascripts/lunr/lunr.jp.js new file mode 100644 index 000000000..a33c3c71c --- /dev/null +++ b/docs/assets/javascripts/lunr/lunr.jp.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not 
present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.jp=function(){this.pipeline.reset(),this.pipeline.add(e.jp.stopWordFilter,e.jp.stemmer),r?this.tokenizer=e.jp.tokenizer:(e.tokenizer&&(e.tokenizer=e.jp.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.jp.tokenizer))};var t=new e.TinySegmenter;e.jp.tokenizer=function(n){if(!arguments.length||null==n||null==n)return[];if(Array.isArray(n))return n.map(function(t){return r?new e.Token(t.toLowerCase()):t.toLowerCase()});for(var i=n.toString().toLowerCase().replace(/^\s+/,""),o=i.length-1;o>=0;o--)if(/\S/.test(i.charAt(o))){i=i.substring(0,o+1);break}return t.segment(i).filter(function(e){return!!e}).map(function(t){return r?new e.Token(t):t})},e.jp.stemmer=function(e){return e},e.Pipeline.registerFunction(e.jp.stemmer,"stemmer-jp"),e.jp.wordCharacters="一二三四五六七八九十百千万億兆一-龠々〆ヵヶぁ-んァ-ヴーア-ン゙a-zA-Za-zA-Z0-90-9",e.jp.stopWordFilter=function(t){if(-1===e.jp.stopWordFilter.stopWords.indexOf(r?t.toString():t))return t},e.jp.stopWordFilter=e.generateStopWordFilter("これ それ あれ この その あの ここ そこ あそこ こちら どこ だれ なに なん 何 私 貴方 貴方方 我々 私達 あの人 あのかた 彼女 彼 です あります おります います は が の に を で え から まで より も どの と し それで しかし".split(" ")),e.Pipeline.registerFunction(e.jp.stopWordFilter,"stopWordFilter-jp")}}); \ No newline at end of file diff --git a/docs/assets/javascripts/lunr/lunr.multi.js b/docs/assets/javascripts/lunr/lunr.multi.js new file mode 100644 index 000000000..d3dbc860c --- /dev/null +++ b/docs/assets/javascripts/lunr/lunr.multi.js @@ -0,0 +1 @@ +!function(e,i){"function"==typeof define&&define.amd?define(i):"object"==typeof exports?module.exports=i():i()(e.lunr)}(this,function(){return function(e){e.multiLanguage=function(){for(var i=Array.prototype.slice.call(arguments),t=i.join("-"),r="",n=[],s=[],p=0;p=l.limit)return;l.cursor=r+1}for(;!l.out_grouping(a,97,248);){if(l.cursor>=l.limit)return;l.cursor++}(i=l.cursor)=i&&(r=l.limit_backward,l.limit_backward=i,l.ket=l.cursor,e=l.find_among_b(t,29),l.limit_backward=r,e))switch(l.bra=l.cursor,e){case 1:l.slice_del();break;case 2:n=l.limit-l.cursor,l.in_grouping_b(m,98,122)?l.slice_del():(l.cursor=l.limit-n,l.eq_s_b(1,"k")&&l.out_grouping_b(a,97,248)&&l.slice_del());break;case 3:l.slice_from("er")}}(),l.cursor=l.limit,n=l.limit-l.cursor,l.cursor>=i&&(r=l.limit_backward,l.limit_backward=i,l.ket=l.cursor,l.find_among_b(o,2)?(l.bra=l.cursor,l.limit_backward=r,l.cursor=l.limit-n,l.cursor>l.limit_backward&&(l.cursor--,l.bra=l.cursor,l.slice_del())):l.limit_backward=r),l.cursor=l.limit,l.cursor>=i&&(d=l.limit_backward,l.limit_backward=i,l.ket=l.cursor,(u=l.find_among_b(s,11))?(l.bra=l.cursor,l.limit_backward=d,1==u&&l.slice_del()):l.limit_backward=d),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}),e.Pipeline.registerFunction(e.no.stemmer,"stemmer-no"),e.no.stopWordFilter=e.generateStopWordFilter("alle at av bare begge ble blei bli blir blitt både båe da de deg dei deim deira deires dem den denne der dere deres det dette di din disse ditt du dykk dykkar då eg ein eit eitt eller elles en enn er et ett etter for fordi fra før ha hadde han hans har hennar henne hennes her hjå ho hoe honom hoss hossen hun hva hvem hver hvilke hvilken hvis hvor hvordan hvorfor i ikke ikkje ikkje ingen ingi inkje inn inni ja jeg kan kom korleis korso kun kunne 
kva kvar kvarhelst kven kvi kvifor man mange me med medan meg meget mellom men mi min mine mitt mot mykje ned no noe noen noka noko nokon nokor nokre nå når og også om opp oss over på samme seg selv si si sia sidan siden sin sine sitt sjøl skal skulle slik so som som somme somt så sånn til um upp ut uten var vart varte ved vere verte vi vil ville vore vors vort vår være være vært å".split(" ")),e.Pipeline.registerFunction(e.no.stopWordFilter,"stopWordFilter-no")}}); \ No newline at end of file diff --git a/docs/assets/javascripts/lunr/lunr.pt.js b/docs/assets/javascripts/lunr/lunr.pt.js new file mode 100644 index 000000000..51035c969 --- /dev/null +++ b/docs/assets/javascripts/lunr/lunr.pt.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r,s,n;e.pt=function(){this.pipeline.reset(),this.pipeline.add(e.pt.trimmer,e.pt.stopWordFilter,e.pt.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.pt.stemmer))},e.pt.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.pt.trimmer=e.trimmerSupport.generateTrimmer(e.pt.wordCharacters),e.Pipeline.registerFunction(e.pt.trimmer,"trimmer-pt"),e.pt.stemmer=(r=e.stemmerSupport.Among,s=e.stemmerSupport.SnowballProgram,n=new function(){var e,n,i,o=[new r("",-1,3),new r("ã",0,1),new r("õ",0,2)],a=[new r("",-1,3),new r("a~",0,1),new r("o~",0,2)],t=[new r("ic",-1,-1),new r("ad",-1,-1),new r("os",-1,-1),new r("iv",-1,1)],u=[new r("ante",-1,1),new r("avel",-1,1),new r("ível",-1,1)],w=[new r("ic",-1,1),new r("abil",-1,1),new r("iv",-1,1)],m=[new r("ica",-1,1),new r("ância",-1,1),new r("ência",-1,4),new r("ira",-1,9),new r("adora",-1,1),new r("osa",-1,1),new r("ista",-1,1),new r("iva",-1,8),new r("eza",-1,1),new r("logía",-1,2),new r("idade",-1,7),new r("ante",-1,1),new r("mente",-1,6),new r("amente",12,5),new r("ável",-1,1),new r("ível",-1,1),new r("ución",-1,3),new r("ico",-1,1),new r("ismo",-1,1),new r("oso",-1,1),new r("amento",-1,1),new r("imento",-1,1),new r("ivo",-1,8),new r("aça~o",-1,1),new r("ador",-1,1),new r("icas",-1,1),new r("ências",-1,4),new r("iras",-1,9),new r("adoras",-1,1),new r("osas",-1,1),new r("istas",-1,1),new r("ivas",-1,8),new r("ezas",-1,1),new r("logías",-1,2),new r("idades",-1,7),new r("uciones",-1,3),new r("adores",-1,1),new r("antes",-1,1),new r("aço~es",-1,1),new r("icos",-1,1),new r("ismos",-1,1),new r("osos",-1,1),new r("amentos",-1,1),new r("imentos",-1,1),new r("ivos",-1,8)],c=[new r("ada",-1,1),new r("ida",-1,1),new r("ia",-1,1),new r("aria",2,1),new r("eria",2,1),new r("iria",2,1),new r("ara",-1,1),new r("era",-1,1),new r("ira",-1,1),new r("ava",-1,1),new r("asse",-1,1),new r("esse",-1,1),new r("isse",-1,1),new r("aste",-1,1),new r("este",-1,1),new r("iste",-1,1),new r("ei",-1,1),new r("arei",16,1),new r("erei",16,1),new r("irei",16,1),new r("am",-1,1),new r("iam",20,1),new r("ariam",21,1),new r("eriam",21,1),new r("iriam",21,1),new r("aram",20,1),new r("eram",20,1),new r("iram",20,1),new r("avam",20,1),new r("em",-1,1),new r("arem",29,1),new r("erem",29,1),new r("irem",29,1),new r("assem",29,1),new r("essem",29,1),new r("issem",29,1),new r("ado",-1,1),new r("ido",-1,1),new 
r("ando",-1,1),new r("endo",-1,1),new r("indo",-1,1),new r("ara~o",-1,1),new r("era~o",-1,1),new r("ira~o",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("ir",-1,1),new r("as",-1,1),new r("adas",47,1),new r("idas",47,1),new r("ias",47,1),new r("arias",50,1),new r("erias",50,1),new r("irias",50,1),new r("aras",47,1),new r("eras",47,1),new r("iras",47,1),new r("avas",47,1),new r("es",-1,1),new r("ardes",58,1),new r("erdes",58,1),new r("irdes",58,1),new r("ares",58,1),new r("eres",58,1),new r("ires",58,1),new r("asses",58,1),new r("esses",58,1),new r("isses",58,1),new r("astes",58,1),new r("estes",58,1),new r("istes",58,1),new r("is",-1,1),new r("ais",71,1),new r("eis",71,1),new r("areis",73,1),new r("ereis",73,1),new r("ireis",73,1),new r("áreis",73,1),new r("éreis",73,1),new r("íreis",73,1),new r("ásseis",73,1),new r("ésseis",73,1),new r("ísseis",73,1),new r("áveis",73,1),new r("íeis",73,1),new r("aríeis",84,1),new r("eríeis",84,1),new r("iríeis",84,1),new r("ados",-1,1),new r("idos",-1,1),new r("amos",-1,1),new r("áramos",90,1),new r("éramos",90,1),new r("íramos",90,1),new r("ávamos",90,1),new r("íamos",90,1),new r("aríamos",95,1),new r("eríamos",95,1),new r("iríamos",95,1),new r("emos",-1,1),new r("aremos",99,1),new r("eremos",99,1),new r("iremos",99,1),new r("ássemos",99,1),new r("êssemos",99,1),new r("íssemos",99,1),new r("imos",-1,1),new r("armos",-1,1),new r("ermos",-1,1),new r("irmos",-1,1),new r("ámos",-1,1),new r("arás",-1,1),new r("erás",-1,1),new r("irás",-1,1),new r("eu",-1,1),new r("iu",-1,1),new r("ou",-1,1),new r("ará",-1,1),new r("erá",-1,1),new r("irá",-1,1)],l=[new r("a",-1,1),new r("i",-1,1),new r("o",-1,1),new r("os",-1,1),new r("á",-1,1),new r("í",-1,1),new r("ó",-1,1)],f=[new r("e",-1,1),new r("ç",-1,2),new r("é",-1,1),new r("ê",-1,1)],d=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,3,19,12,2],v=new s;function p(){if(v.out_grouping(d,97,250)){for(;!v.in_grouping(d,97,250);){if(v.cursor>=v.limit)return!0;v.cursor++}return!1}return!0}function _(){var e,r,s=v.cursor;if(v.in_grouping(d,97,250))if(e=v.cursor,p()){if(v.cursor=e,function(){if(v.in_grouping(d,97,250))for(;!v.out_grouping(d,97,250);){if(v.cursor>=v.limit)return!1;v.cursor++}return i=v.cursor,!0}())return}else i=v.cursor;if(v.cursor=s,v.out_grouping(d,97,250)){if(r=v.cursor,p()){if(v.cursor=r,!v.in_grouping(d,97,250)||v.cursor>=v.limit)return;v.cursor++}i=v.cursor}}function h(){for(;!v.in_grouping(d,97,250);){if(v.cursor>=v.limit)return!1;v.cursor++}for(;!v.out_grouping(d,97,250);){if(v.cursor>=v.limit)return!1;v.cursor++}return!0}function b(){return i<=v.cursor}function g(){return e<=v.cursor}function k(){var e;if(v.ket=v.cursor,!(e=v.find_among_b(m,45)))return!1;switch(v.bra=v.cursor,e){case 1:if(!g())return!1;v.slice_del();break;case 2:if(!g())return!1;v.slice_from("log");break;case 3:if(!g())return!1;v.slice_from("u");break;case 4:if(!g())return!1;v.slice_from("ente");break;case 5:if(!(n<=v.cursor))return!1;v.slice_del(),v.ket=v.cursor,(e=v.find_among_b(t,4))&&(v.bra=v.cursor,g()&&(v.slice_del(),1==e&&(v.ket=v.cursor,v.eq_s_b(2,"at")&&(v.bra=v.cursor,g()&&v.slice_del()))));break;case 6:if(!g())return!1;v.slice_del(),v.ket=v.cursor,(e=v.find_among_b(u,3))&&(v.bra=v.cursor,1==e&&g()&&v.slice_del());break;case 7:if(!g())return!1;v.slice_del(),v.ket=v.cursor,(e=v.find_among_b(w,3))&&(v.bra=v.cursor,1==e&&g()&&v.slice_del());break;case 8:if(!g())return!1;v.slice_del(),v.ket=v.cursor,v.eq_s_b(2,"at")&&(v.bra=v.cursor,g()&&v.slice_del());break;case 9:if(!b()||!v.eq_s_b(1,"e"))return!1;v.slice_from("ir")}return!0}function 
q(e,r){if(v.eq_s_b(1,e)){v.bra=v.cursor;var s=v.limit-v.cursor;if(v.eq_s_b(1,r))return v.cursor=v.limit-s,b()&&v.slice_del(),!1}return!0}function j(){if(!k()&&(v.cursor=v.limit,!function(){var e,r;if(v.cursor>=i){if(r=v.limit_backward,v.limit_backward=i,v.ket=v.cursor,e=v.find_among_b(c,120))return v.bra=v.cursor,1==e&&v.slice_del(),v.limit_backward=r,!0;v.limit_backward=r}return!1}()))return v.cursor=v.limit,v.ket=v.cursor,void((e=v.find_among_b(l,7))&&(v.bra=v.cursor,1==e&&b()&&v.slice_del()));var e;v.cursor=v.limit,v.ket=v.cursor,v.eq_s_b(1,"i")&&(v.bra=v.cursor,v.eq_s_b(1,"c")&&(v.cursor=v.limit,b()&&v.slice_del()))}this.setCurrent=function(e){v.setCurrent(e)},this.getCurrent=function(){return v.getCurrent()},this.stem=function(){var r,s=v.cursor;return function(){for(var e;;){if(v.bra=v.cursor,e=v.find_among(o,3))switch(v.ket=v.cursor,e){case 1:v.slice_from("a~");continue;case 2:v.slice_from("o~");continue;case 3:if(v.cursor>=v.limit)break;v.cursor++;continue}break}}(),v.cursor=s,r=v.cursor,i=v.limit,n=i,e=i,_(),v.cursor=r,h()&&(n=v.cursor,h()&&(e=v.cursor)),v.limit_backward=s,v.cursor=v.limit,j(),v.cursor=v.limit,function(){var e;if(v.ket=v.cursor,e=v.find_among_b(f,4))switch(v.bra=v.cursor,e){case 1:b()&&(v.slice_del(),v.ket=v.cursor,v.limit,v.cursor,q("u","g")&&q("i","c"));break;case 2:v.slice_from("c")}}(),v.cursor=v.limit_backward,function(){for(var e;;){if(v.bra=v.cursor,e=v.find_among(a,3))switch(v.ket=v.cursor,e){case 1:v.slice_from("ã");continue;case 2:v.slice_from("õ");continue;case 3:if(v.cursor>=v.limit)break;v.cursor++;continue}break}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}),e.Pipeline.registerFunction(e.pt.stemmer,"stemmer-pt"),e.pt.stopWordFilter=e.generateStopWordFilter("a ao aos aquela aquelas aquele aqueles aquilo as até com como da das de dela delas dele deles depois do dos e ela elas ele eles em entre era eram essa essas esse esses esta estamos estas estava estavam este esteja estejam estejamos estes esteve estive estivemos estiver estivera estiveram estiverem estivermos estivesse estivessem estivéramos estivéssemos estou está estávamos estão eu foi fomos for fora foram forem formos fosse fossem fui fôramos fôssemos haja hajam hajamos havemos hei houve houvemos houver houvera houveram houverei houverem houveremos houveria houveriam houvermos houverá houverão houveríamos houvesse houvessem houvéramos houvéssemos há hão isso isto já lhe lhes mais mas me mesmo meu meus minha minhas muito na nas nem no nos nossa nossas nosso nossos num numa não nós o os ou para pela pelas pelo pelos por qual quando que quem se seja sejam sejamos sem serei seremos seria seriam será serão seríamos seu seus somos sou sua suas são só também te tem temos tenha tenham tenhamos tenho terei teremos teria teriam terá terão teríamos teu teus teve tinha tinham tive tivemos tiver tivera tiveram tiverem tivermos tivesse tivessem tivéramos tivéssemos tu tua tuas tém tínhamos um uma você vocês vos à às éramos".split(" ")),e.Pipeline.registerFunction(e.pt.stopWordFilter,"stopWordFilter-pt")}}); \ No newline at end of file diff --git a/docs/assets/javascripts/lunr/lunr.ro.js b/docs/assets/javascripts/lunr/lunr.ro.js new file mode 100644 index 000000000..155cb5621 --- /dev/null +++ b/docs/assets/javascripts/lunr/lunr.ro.js @@ -0,0 +1 @@ +!function(e,i){"function"==typeof define&&define.amd?define(i):"object"==typeof exports?module.exports=i():i()(e.lunr)}(this,function(){return 
function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var i,r,n;e.ro=function(){this.pipeline.reset(),this.pipeline.add(e.ro.trimmer,e.ro.stopWordFilter,e.ro.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ro.stemmer))},e.ro.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.ro.trimmer=e.trimmerSupport.generateTrimmer(e.ro.wordCharacters),e.Pipeline.registerFunction(e.ro.trimmer,"trimmer-ro"),e.ro.stemmer=(i=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,n=new function(){var e,n,t,a,o=[new i("",-1,3),new i("I",0,1),new i("U",0,2)],s=[new i("ea",-1,3),new i("aţia",-1,7),new i("aua",-1,2),new i("iua",-1,4),new i("aţie",-1,7),new i("ele",-1,3),new i("ile",-1,5),new i("iile",6,4),new i("iei",-1,4),new i("atei",-1,6),new i("ii",-1,4),new i("ului",-1,1),new i("ul",-1,1),new i("elor",-1,3),new i("ilor",-1,4),new i("iilor",14,4)],c=[new i("icala",-1,4),new i("iciva",-1,4),new i("ativa",-1,5),new i("itiva",-1,6),new i("icale",-1,4),new i("aţiune",-1,5),new i("iţiune",-1,6),new i("atoare",-1,5),new i("itoare",-1,6),new i("ătoare",-1,5),new i("icitate",-1,4),new i("abilitate",-1,1),new i("ibilitate",-1,2),new i("ivitate",-1,3),new i("icive",-1,4),new i("ative",-1,5),new i("itive",-1,6),new i("icali",-1,4),new i("atori",-1,5),new i("icatori",18,4),new i("itori",-1,6),new i("ători",-1,5),new i("icitati",-1,4),new i("abilitati",-1,1),new i("ivitati",-1,3),new i("icivi",-1,4),new i("ativi",-1,5),new i("itivi",-1,6),new i("icităi",-1,4),new i("abilităi",-1,1),new i("ivităi",-1,3),new i("icităţi",-1,4),new i("abilităţi",-1,1),new i("ivităţi",-1,3),new i("ical",-1,4),new i("ator",-1,5),new i("icator",35,4),new i("itor",-1,6),new i("ător",-1,5),new i("iciv",-1,4),new i("ativ",-1,5),new i("itiv",-1,6),new i("icală",-1,4),new i("icivă",-1,4),new i("ativă",-1,5),new i("itivă",-1,6)],u=[new i("ica",-1,1),new i("abila",-1,1),new i("ibila",-1,1),new i("oasa",-1,1),new i("ata",-1,1),new i("ita",-1,1),new i("anta",-1,1),new i("ista",-1,3),new i("uta",-1,1),new i("iva",-1,1),new i("ic",-1,1),new i("ice",-1,1),new i("abile",-1,1),new i("ibile",-1,1),new i("isme",-1,3),new i("iune",-1,2),new i("oase",-1,1),new i("ate",-1,1),new i("itate",17,1),new i("ite",-1,1),new i("ante",-1,1),new i("iste",-1,3),new i("ute",-1,1),new i("ive",-1,1),new i("ici",-1,1),new i("abili",-1,1),new i("ibili",-1,1),new i("iuni",-1,2),new i("atori",-1,1),new i("osi",-1,1),new i("ati",-1,1),new i("itati",30,1),new i("iti",-1,1),new i("anti",-1,1),new i("isti",-1,3),new i("uti",-1,1),new i("işti",-1,3),new i("ivi",-1,1),new i("ităi",-1,1),new i("oşi",-1,1),new i("ităţi",-1,1),new i("abil",-1,1),new i("ibil",-1,1),new i("ism",-1,3),new i("ator",-1,1),new i("os",-1,1),new i("at",-1,1),new i("it",-1,1),new i("ant",-1,1),new i("ist",-1,3),new i("ut",-1,1),new i("iv",-1,1),new i("ică",-1,1),new i("abilă",-1,1),new i("ibilă",-1,1),new i("oasă",-1,1),new i("ată",-1,1),new i("ită",-1,1),new i("antă",-1,1),new i("istă",-1,3),new i("ută",-1,1),new i("ivă",-1,1)],w=[new i("ea",-1,1),new i("ia",-1,1),new i("esc",-1,1),new i("ăsc",-1,1),new i("ind",-1,1),new i("ând",-1,1),new i("are",-1,1),new i("ere",-1,1),new i("ire",-1,1),new i("âre",-1,1),new i("se",-1,2),new i("ase",10,1),new i("sese",10,2),new i("ise",10,1),new i("use",10,1),new i("âse",10,1),new 
i("eşte",-1,1),new i("ăşte",-1,1),new i("eze",-1,1),new i("ai",-1,1),new i("eai",19,1),new i("iai",19,1),new i("sei",-1,2),new i("eşti",-1,1),new i("ăşti",-1,1),new i("ui",-1,1),new i("ezi",-1,1),new i("âi",-1,1),new i("aşi",-1,1),new i("seşi",-1,2),new i("aseşi",29,1),new i("seseşi",29,2),new i("iseşi",29,1),new i("useşi",29,1),new i("âseşi",29,1),new i("işi",-1,1),new i("uşi",-1,1),new i("âşi",-1,1),new i("aţi",-1,2),new i("eaţi",38,1),new i("iaţi",38,1),new i("eţi",-1,2),new i("iţi",-1,2),new i("âţi",-1,2),new i("arăţi",-1,1),new i("serăţi",-1,2),new i("aserăţi",45,1),new i("seserăţi",45,2),new i("iserăţi",45,1),new i("userăţi",45,1),new i("âserăţi",45,1),new i("irăţi",-1,1),new i("urăţi",-1,1),new i("ârăţi",-1,1),new i("am",-1,1),new i("eam",54,1),new i("iam",54,1),new i("em",-1,2),new i("asem",57,1),new i("sesem",57,2),new i("isem",57,1),new i("usem",57,1),new i("âsem",57,1),new i("im",-1,2),new i("âm",-1,2),new i("ăm",-1,2),new i("arăm",65,1),new i("serăm",65,2),new i("aserăm",67,1),new i("seserăm",67,2),new i("iserăm",67,1),new i("userăm",67,1),new i("âserăm",67,1),new i("irăm",65,1),new i("urăm",65,1),new i("ârăm",65,1),new i("au",-1,1),new i("eau",76,1),new i("iau",76,1),new i("indu",-1,1),new i("ându",-1,1),new i("ez",-1,1),new i("ească",-1,1),new i("ară",-1,1),new i("seră",-1,2),new i("aseră",84,1),new i("seseră",84,2),new i("iseră",84,1),new i("useră",84,1),new i("âseră",84,1),new i("iră",-1,1),new i("ură",-1,1),new i("âră",-1,1),new i("ează",-1,1)],m=[new i("a",-1,1),new i("e",-1,1),new i("ie",1,1),new i("i",-1,1),new i("ă",-1,1)],l=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,2,32,0,0,4],f=new r;function p(e,i){f.eq_s(1,e)&&(f.ket=f.cursor,f.in_grouping(l,97,259)&&f.slice_from(i))}function d(){if(f.out_grouping(l,97,259)){for(;!f.in_grouping(l,97,259);){if(f.cursor>=f.limit)return!0;f.cursor++}return!1}return!0}function b(){var e,i,r=f.cursor;if(f.in_grouping(l,97,259)){if(e=f.cursor,!d())return void(a=f.cursor);if(f.cursor=e,!function(){if(f.in_grouping(l,97,259))for(;!f.out_grouping(l,97,259);){if(f.cursor>=f.limit)return!0;f.cursor++}return!1}())return void(a=f.cursor)}f.cursor=r,f.out_grouping(l,97,259)&&(i=f.cursor,d()&&(f.cursor=i,f.in_grouping(l,97,259)&&f.cursor=f.limit)return!1;f.cursor++}for(;!f.out_grouping(l,97,259);){if(f.cursor>=f.limit)return!1;f.cursor++}return!0}function _(){return t<=f.cursor}function g(){var i,r=f.limit-f.cursor;if(f.ket=f.cursor,(i=f.find_among_b(c,46))&&(f.bra=f.cursor,_())){switch(i){case 1:f.slice_from("abil");break;case 2:f.slice_from("ibil");break;case 3:f.slice_from("iv");break;case 4:f.slice_from("ic");break;case 5:f.slice_from("at");break;case 6:f.slice_from("it")}return e=!0,f.cursor=f.limit-r,!0}return!1}function k(){var i,r;for(e=!1;;)if(r=f.limit-f.cursor,!g()){f.cursor=f.limit-r;break}if(f.ket=f.cursor,(i=f.find_among_b(u,62))&&(f.bra=f.cursor,n<=f.cursor)){switch(i){case 1:f.slice_del();break;case 2:f.eq_s_b(1,"ţ")&&(f.bra=f.cursor,f.slice_from("t"));break;case 3:f.slice_from("ist")}e=!0}}function h(){var e;f.ket=f.cursor,(e=f.find_among_b(m,5))&&(f.bra=f.cursor,a<=f.cursor&&1==e&&f.slice_del())}this.setCurrent=function(e){f.setCurrent(e)},this.getCurrent=function(){return f.getCurrent()},this.stem=function(){var i,r=f.cursor;return function(){for(var e,i;e=f.cursor,f.in_grouping(l,97,259)&&(i=f.cursor,f.bra=i,p("u","U"),f.cursor=i,p("i","I")),f.cursor=e,!(f.cursor>=f.limit);)f.cursor++}(),f.cursor=r,i=f.cursor,a=f.limit,t=a,n=a,b(),f.cursor=i,v()&&(t=f.cursor,v()&&(n=f.cursor)),f.limit_backward=r,f.cursor=f.limit,function(){var 
e,i;if(f.ket=f.cursor,(e=f.find_among_b(s,16))&&(f.bra=f.cursor,_()))switch(e){case 1:f.slice_del();break;case 2:f.slice_from("a");break;case 3:f.slice_from("e");break;case 4:f.slice_from("i");break;case 5:i=f.limit-f.cursor,f.eq_s_b(2,"ab")||(f.cursor=f.limit-i,f.slice_from("i"));break;case 6:f.slice_from("at");break;case 7:f.slice_from("aţi")}}(),f.cursor=f.limit,k(),f.cursor=f.limit,e||(f.cursor=f.limit,function(){var e,i,r;if(f.cursor>=a){if(i=f.limit_backward,f.limit_backward=a,f.ket=f.cursor,e=f.find_among_b(w,94))switch(f.bra=f.cursor,e){case 1:if(r=f.limit-f.cursor,!f.out_grouping_b(l,97,259)&&(f.cursor=f.limit-r,!f.eq_s_b(1,"u")))break;case 2:f.slice_del()}f.limit_backward=i}}(),f.cursor=f.limit),h(),f.cursor=f.limit_backward,function(){for(var e;;){if(f.bra=f.cursor,e=f.find_among(o,3))switch(f.ket=f.cursor,e){case 1:f.slice_from("i");continue;case 2:f.slice_from("u");continue;case 3:if(f.cursor>=f.limit)break;f.cursor++;continue}break}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}),e.Pipeline.registerFunction(e.ro.stemmer,"stemmer-ro"),e.ro.stopWordFilter=e.generateStopWordFilter("acea aceasta această aceea acei aceia acel acela acele acelea acest acesta aceste acestea aceşti aceştia acolo acord acum ai aia aibă aici al ale alea altceva altcineva am ar are asemenea asta astea astăzi asupra au avea avem aveţi azi aş aşadar aţi bine bucur bună ca care caut ce cel ceva chiar cinci cine cineva contra cu cum cumva curând curînd când cât câte câtva câţi cînd cît cîte cîtva cîţi că căci cărei căror cărui către da dacă dar datorită dată dau de deci deja deoarece departe deşi din dinaintea dintr- dintre doi doilea două drept după dă ea ei el ele eram este eu eşti face fata fi fie fiecare fii fim fiu fiţi frumos fără graţie halbă iar ieri la le li lor lui lângă lîngă mai mea mei mele mereu meu mi mie mine mult multă mulţi mulţumesc mâine mîine mă ne nevoie nici nicăieri nimeni nimeri nimic nişte noastre noastră noi noroc nostru nouă noştri nu opt ori oricare orice oricine oricum oricând oricât oricînd oricît oriunde patra patru patrulea pe pentru peste pic poate pot prea prima primul prin puţin puţina puţină până pînă rog sa sale sau se spate spre sub sunt suntem sunteţi sută sînt sîntem sînteţi să săi său ta tale te timp tine toate toată tot totuşi toţi trei treia treilea tu tăi tău un una unde undeva unei uneia unele uneori unii unor unora unu unui unuia unul vi voastre voastră voi vostru vouă voştri vreme vreo vreun vă zece zero zi zice îi îl îmi împotriva în înainte înaintea încotro încât încît între întrucât întrucît îţi ăla ălea ăsta ăstea ăştia şapte şase şi ştiu ţi ţie".split(" ")),e.Pipeline.registerFunction(e.ro.stopWordFilter,"stopWordFilter-ro")}}); \ No newline at end of file diff --git a/docs/assets/javascripts/lunr/lunr.ru.js b/docs/assets/javascripts/lunr/lunr.ru.js new file mode 100644 index 000000000..078609ad8 --- /dev/null +++ b/docs/assets/javascripts/lunr/lunr.ru.js @@ -0,0 +1 @@ +!function(e,n){"function"==typeof define&&define.amd?define(n):"object"==typeof exports?module.exports=n():n()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var n,r,t;e.ru=function(){this.pipeline.reset(),this.pipeline.add(e.ru.trimmer,e.ru.stopWordFilter,e.ru.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ru.stemmer))},e.ru.wordCharacters="Ѐ-҄҇-ԯᴫᵸⷠ-ⷿꙀ-ꚟ︮︯",e.ru.trimmer=e.trimmerSupport.generateTrimmer(e.ru.wordCharacters),e.Pipeline.registerFunction(e.ru.trimmer,"trimmer-ru"),e.ru.stemmer=(n=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,t=new function(){var e,t,w=[new n("в",-1,1),new n("ив",0,2),new n("ыв",0,2),new n("вши",-1,1),new n("ивши",3,2),new n("ывши",3,2),new n("вшись",-1,1),new n("ившись",6,2),new n("ывшись",6,2)],i=[new n("ее",-1,1),new n("ие",-1,1),new n("ое",-1,1),new n("ые",-1,1),new n("ими",-1,1),new n("ыми",-1,1),new n("ей",-1,1),new n("ий",-1,1),new n("ой",-1,1),new n("ый",-1,1),new n("ем",-1,1),new n("им",-1,1),new n("ом",-1,1),new n("ым",-1,1),new n("его",-1,1),new n("ого",-1,1),new n("ему",-1,1),new n("ому",-1,1),new n("их",-1,1),new n("ых",-1,1),new n("ею",-1,1),new n("ою",-1,1),new n("ую",-1,1),new n("юю",-1,1),new n("ая",-1,1),new n("яя",-1,1)],u=[new n("ем",-1,1),new n("нн",-1,1),new n("вш",-1,1),new n("ивш",2,2),new n("ывш",2,2),new n("щ",-1,1),new n("ющ",5,1),new n("ующ",6,2)],s=[new n("сь",-1,1),new n("ся",-1,1)],o=[new n("ла",-1,1),new n("ила",0,2),new n("ыла",0,2),new n("на",-1,1),new n("ена",3,2),new n("ете",-1,1),new n("ите",-1,2),new n("йте",-1,1),new n("ейте",7,2),new n("уйте",7,2),new n("ли",-1,1),new n("или",10,2),new n("ыли",10,2),new n("й",-1,1),new n("ей",13,2),new n("уй",13,2),new n("л",-1,1),new n("ил",16,2),new n("ыл",16,2),new n("ем",-1,1),new n("им",-1,2),new n("ым",-1,2),new n("н",-1,1),new n("ен",22,2),new n("ло",-1,1),new n("ило",24,2),new n("ыло",24,2),new n("но",-1,1),new n("ено",27,2),new n("нно",27,1),new n("ет",-1,1),new n("ует",30,2),new n("ит",-1,2),new n("ыт",-1,2),new n("ют",-1,1),new n("уют",34,2),new n("ят",-1,2),new n("ны",-1,1),new n("ены",37,2),new n("ть",-1,1),new n("ить",39,2),new n("ыть",39,2),new n("ешь",-1,1),new n("ишь",-1,2),new n("ю",-1,2),new n("ую",44,2)],c=[new n("а",-1,1),new n("ев",-1,1),new n("ов",-1,1),new n("е",-1,1),new n("ие",3,1),new n("ье",3,1),new n("и",-1,1),new n("еи",6,1),new n("ии",6,1),new n("ами",6,1),new n("ями",6,1),new n("иями",10,1),new n("й",-1,1),new n("ей",12,1),new n("ией",13,1),new n("ий",12,1),new n("ой",12,1),new n("ам",-1,1),new n("ем",-1,1),new n("ием",18,1),new n("ом",-1,1),new n("ям",-1,1),new n("иям",21,1),new n("о",-1,1),new n("у",-1,1),new n("ах",-1,1),new n("ях",-1,1),new n("иях",26,1),new n("ы",-1,1),new n("ь",-1,1),new n("ю",-1,1),new n("ию",30,1),new n("ью",30,1),new n("я",-1,1),new n("ия",33,1),new n("ья",33,1)],m=[new n("ост",-1,1),new n("ость",-1,1)],l=[new n("ейше",-1,1),new n("н",-1,2),new n("ейш",-1,1),new n("ь",-1,3)],f=[33,65,8,232],a=new r;function p(){for(;!a.in_grouping(f,1072,1103);){if(a.cursor>=a.limit)return!1;a.cursor++}return!0}function d(){for(;!a.out_grouping(f,1072,1103);){if(a.cursor>=a.limit)return!1;a.cursor++}return!0}function _(e,n){var r,t;if(a.ket=a.cursor,r=a.find_among_b(e,n)){switch(a.bra=a.cursor,r){case 1:if(t=a.limit-a.cursor,!a.eq_s_b(1,"а")&&(a.cursor=a.limit-t,!a.eq_s_b(1,"я")))return!1;case 2:a.slice_del()}return!0}return!1}function b(e,n){var r;return a.ket=a.cursor,!!(r=a.find_among_b(e,n))&&(a.bra=a.cursor,1==r&&a.slice_del(),!0)}function h(){return!!b(i,26)&&(_(u,8),!0)}function g(){var 
n;a.ket=a.cursor,(n=a.find_among_b(m,2))&&(a.bra=a.cursor,e<=a.cursor&&1==n&&a.slice_del())}this.setCurrent=function(e){a.setCurrent(e)},this.getCurrent=function(){return a.getCurrent()},this.stem=function(){return t=a.limit,e=t,p()&&(t=a.cursor,d()&&p()&&d()&&(e=a.cursor)),a.cursor=a.limit,!(a.cursor=i&&t[(e-=i)>>3]&1<<(7&e))return this.cursor++,!0}return!1},in_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e<=s&&e>=i&&t[(e-=i)>>3]&1<<(7&e))return this.cursor--,!0}return!1},out_grouping:function(t,i,s){if(this.cursors||e>3]&1<<(7&e)))return this.cursor++,!0}return!1},out_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e>s||e>3]&1<<(7&e)))return this.cursor--,!0}return!1},eq_s:function(t,i){if(this.limit-this.cursor>1),f=0,l=o0||e==s||c)break;c=!0}}for(;;){if(o>=(_=t[s]).s_size){if(this.cursor=n+_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n+_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},find_among_b:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit_backward,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o=0;_--){if(n-l==u){f=-1;break}if(f=r.charCodeAt(n-1-l)-m.s[_])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var m;if(o>=(m=t[s]).s_size){if(this.cursor=n-m.s_size,!m.method)return m.result;var b=m.method();if(this.cursor=n-m.s_size,b)return m.result}if((s=m.substring_i)<0)return 0}},replace_s:function(t,i,s){var e=s.length-(i-t),n=r.substring(0,t),u=r.substring(i);return r=n+s+u,this.limit+=e,this.cursor>=i?this.cursor+=e:this.cursor>t&&(this.cursor=t),e},slice_check:function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>r.length)throw"faulty slice operation"},slice_from:function(r){this.slice_check(),this.replace_s(this.bra,this.ket,r)},slice_del:function(){this.slice_from("")},insert:function(r,t,i){var s=this.replace_s(r,t,i);r<=this.bra&&(this.bra+=s),r<=this.ket&&(this.ket+=s)},slice_to:function(){return this.slice_check(),r.substring(this.bra,this.ket)},eq_v_b:function(r){return this.eq_s_b(r.length,r)}}}},r.trimmerSupport={generateTrimmer:function(r){var t=new RegExp("^[^"+r+"]+"),i=new RegExp("[^"+r+"]+$");return function(r){return"function"==typeof r.update?r.update(function(r){return r.replace(t,"").replace(i,"")}):r.replace(t,"").replace(i,"")}}}}}); \ No newline at end of file diff --git a/docs/assets/javascripts/lunr/lunr.sv.js b/docs/assets/javascripts/lunr/lunr.sv.js new file mode 100644 index 000000000..4bb0f9f92 --- /dev/null +++ b/docs/assets/javascripts/lunr/lunr.sv.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var r,n,t;e.sv=function(){this.pipeline.reset(),this.pipeline.add(e.sv.trimmer,e.sv.stopWordFilter,e.sv.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sv.stemmer))},e.sv.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.sv.trimmer=e.trimmerSupport.generateTrimmer(e.sv.wordCharacters),e.Pipeline.registerFunction(e.sv.trimmer,"trimmer-sv"),e.sv.stemmer=(r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,t=new function(){var e,t,i=[new r("a",-1,1),new r("arna",0,1),new r("erna",0,1),new r("heterna",2,1),new r("orna",0,1),new r("ad",-1,1),new r("e",-1,1),new r("ade",6,1),new r("ande",6,1),new r("arne",6,1),new r("are",6,1),new r("aste",6,1),new r("en",-1,1),new r("anden",12,1),new r("aren",12,1),new r("heten",12,1),new r("ern",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",18,1),new r("or",-1,1),new r("s",-1,2),new r("as",21,1),new r("arnas",22,1),new r("ernas",22,1),new r("ornas",22,1),new r("es",21,1),new r("ades",26,1),new r("andes",26,1),new r("ens",21,1),new r("arens",29,1),new r("hetens",29,1),new r("erns",21,1),new r("at",-1,1),new r("andet",-1,1),new r("het",-1,1),new r("ast",-1,1)],s=[new r("dd",-1,-1),new r("gd",-1,-1),new r("nn",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1),new r("tt",-1,-1)],a=[new r("ig",-1,1),new r("lig",0,1),new r("els",-1,1),new r("fullt",-1,3),new r("löst",-1,2)],o=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32],u=[119,127,149],m=new n;this.setCurrent=function(e){m.setCurrent(e)},this.getCurrent=function(){return m.getCurrent()},this.stem=function(){var r,n=m.cursor;return function(){var r,n=m.cursor+3;if(t=m.limit,0<=n||n<=m.limit){for(e=n;;){if(r=m.cursor,m.in_grouping(o,97,246)){m.cursor=r;break}if(m.cursor=r,m.cursor>=m.limit)return;m.cursor++}for(;!m.out_grouping(o,97,246);){if(m.cursor>=m.limit)return;m.cursor++}(t=m.cursor)=t&&(m.limit_backward=t,m.cursor=m.limit,m.ket=m.cursor,e=m.find_among_b(i,37),m.limit_backward=r,e))switch(m.bra=m.cursor,e){case 1:m.slice_del();break;case 2:m.in_grouping_b(u,98,121)&&m.slice_del()}}(),m.cursor=m.limit,r=m.limit_backward,m.cursor>=t&&(m.limit_backward=t,m.cursor=m.limit,m.find_among_b(s,7)&&(m.cursor=m.limit,m.ket=m.cursor,m.cursor>m.limit_backward&&(m.bra=--m.cursor,m.slice_del())),m.limit_backward=r),m.cursor=m.limit,function(){var e,r;if(m.cursor>=t){if(r=m.limit_backward,m.limit_backward=t,m.cursor=m.limit,m.ket=m.cursor,e=m.find_among_b(a,5))switch(m.bra=m.cursor,e){case 1:m.slice_del();break;case 2:m.slice_from("lös");break;case 3:m.slice_from("full")}m.limit_backward=r}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return t.setCurrent(e),t.stem(),t.getCurrent()}):(t.setCurrent(e),t.stem(),t.getCurrent())}),e.Pipeline.registerFunction(e.sv.stemmer,"stemmer-sv"),e.sv.stopWordFilter=e.generateStopWordFilter("alla allt att av blev bli blir blivit de dem den denna deras dess dessa det detta dig din dina ditt du där då efter ej eller en er era ert ett från för ha hade han hans har henne hennes hon honom hur här i icke ingen inom inte jag ju kan kunde man med mellan men mig min mina mitt mot mycket ni nu när någon något några och om oss på samma sedan sig sin sina sitta själv skulle som så sådan sådana sådant till under upp ut utan vad var vara varför varit varje vars vart vem vi vid vilka vilkas vilken vilket vår våra vårt än är åt över".split(" 
")),e.Pipeline.registerFunction(e.sv.stopWordFilter,"stopWordFilter-sv")}}); \ No newline at end of file diff --git a/docs/assets/javascripts/lunr/lunr.tr.js b/docs/assets/javascripts/lunr/lunr.tr.js new file mode 100644 index 000000000..c42b349e8 --- /dev/null +++ b/docs/assets/javascripts/lunr/lunr.tr.js @@ -0,0 +1 @@ +!function(r,i){"function"==typeof define&&define.amd?define(i):"object"==typeof exports?module.exports=i():i()(r.lunr)}(this,function(){return function(r){if(void 0===r)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===r.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var i,e,n;r.tr=function(){this.pipeline.reset(),this.pipeline.add(r.tr.trimmer,r.tr.stopWordFilter,r.tr.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(r.tr.stemmer))},r.tr.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",r.tr.trimmer=r.trimmerSupport.generateTrimmer(r.tr.wordCharacters),r.Pipeline.registerFunction(r.tr.trimmer,"trimmer-tr"),r.tr.stemmer=(i=r.stemmerSupport.Among,e=r.stemmerSupport.SnowballProgram,n=new function(){var r,n=[new i("m",-1,-1),new i("n",-1,-1),new i("miz",-1,-1),new i("niz",-1,-1),new i("muz",-1,-1),new i("nuz",-1,-1),new i("müz",-1,-1),new i("nüz",-1,-1),new i("mız",-1,-1),new i("nız",-1,-1)],t=[new i("leri",-1,-1),new i("ları",-1,-1)],u=[new i("ni",-1,-1),new i("nu",-1,-1),new i("nü",-1,-1),new i("nı",-1,-1)],o=[new i("in",-1,-1),new i("un",-1,-1),new i("ün",-1,-1),new i("ın",-1,-1)],s=[new i("a",-1,-1),new i("e",-1,-1)],c=[new i("na",-1,-1),new i("ne",-1,-1)],l=[new i("da",-1,-1),new i("ta",-1,-1),new i("de",-1,-1),new i("te",-1,-1)],a=[new i("nda",-1,-1),new i("nde",-1,-1)],m=[new i("dan",-1,-1),new i("tan",-1,-1),new i("den",-1,-1),new i("ten",-1,-1)],d=[new i("ndan",-1,-1),new i("nden",-1,-1)],f=[new i("la",-1,-1),new i("le",-1,-1)],b=[new i("ca",-1,-1),new i("ce",-1,-1)],w=[new i("im",-1,-1),new i("um",-1,-1),new i("üm",-1,-1),new i("ım",-1,-1)],_=[new i("sin",-1,-1),new i("sun",-1,-1),new i("sün",-1,-1),new i("sın",-1,-1)],k=[new i("iz",-1,-1),new i("uz",-1,-1),new i("üz",-1,-1),new i("ız",-1,-1)],p=[new i("siniz",-1,-1),new i("sunuz",-1,-1),new i("sünüz",-1,-1),new i("sınız",-1,-1)],g=[new i("lar",-1,-1),new i("ler",-1,-1)],y=[new i("niz",-1,-1),new i("nuz",-1,-1),new i("nüz",-1,-1),new i("nız",-1,-1)],z=[new i("dir",-1,-1),new i("tir",-1,-1),new i("dur",-1,-1),new i("tur",-1,-1),new i("dür",-1,-1),new i("tür",-1,-1),new i("dır",-1,-1),new i("tır",-1,-1)],h=[new i("casına",-1,-1),new i("cesine",-1,-1)],v=[new i("di",-1,-1),new i("ti",-1,-1),new i("dik",-1,-1),new i("tik",-1,-1),new i("duk",-1,-1),new i("tuk",-1,-1),new i("dük",-1,-1),new i("tük",-1,-1),new i("dık",-1,-1),new i("tık",-1,-1),new i("dim",-1,-1),new i("tim",-1,-1),new i("dum",-1,-1),new i("tum",-1,-1),new i("düm",-1,-1),new i("tüm",-1,-1),new i("dım",-1,-1),new i("tım",-1,-1),new i("din",-1,-1),new i("tin",-1,-1),new i("dun",-1,-1),new i("tun",-1,-1),new i("dün",-1,-1),new i("tün",-1,-1),new i("dın",-1,-1),new i("tın",-1,-1),new i("du",-1,-1),new i("tu",-1,-1),new i("dü",-1,-1),new i("tü",-1,-1),new i("dı",-1,-1),new i("tı",-1,-1)],q=[new i("sa",-1,-1),new i("se",-1,-1),new i("sak",-1,-1),new i("sek",-1,-1),new i("sam",-1,-1),new i("sem",-1,-1),new i("san",-1,-1),new i("sen",-1,-1)],C=[new i("miş",-1,-1),new i("muş",-1,-1),new i("müş",-1,-1),new i("mış",-1,-1)],P=[new i("b",-1,1),new 
i("c",-1,2),new i("d",-1,3),new i("ğ",-1,4)],F=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1],S=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1],W=[65],L=[65],x=[["a",[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],97,305],["e",[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130],101,252],["ı",[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],97,305],["i",[17],101,105],["o",W,111,117],["ö",L,246,252],["u",W,111,117]],A=new e;function E(r,i,e){for(;;){var n=A.limit-A.cursor;if(A.in_grouping_b(r,i,e)){A.cursor=A.limit-n;break}if(A.cursor=A.limit-n,A.cursor<=A.limit_backward)return!1;A.cursor--}return!0}function j(){var r,i;r=A.limit-A.cursor,E(F,97,305);for(var e=0;eA.limit_backward&&(A.cursor--,e=A.limit-A.cursor,i()))?(A.cursor=A.limit-e,!0):(A.cursor=A.limit-n,r()?(A.cursor=A.limit-n,!1):(A.cursor=A.limit-n,!(A.cursor<=A.limit_backward)&&(A.cursor--,!!i()&&(A.cursor=A.limit-n,!0))))}function Z(r){return T(r,function(){return A.in_grouping_b(F,97,305)})}function B(){return Z(function(){return A.eq_s_b(1,"n")})}function D(){return Z(function(){return A.eq_s_b(1,"y")})}function G(){return A.find_among_b(n,10)&&T(function(){return A.in_grouping_b(S,105,305)},function(){return A.out_grouping_b(F,97,305)})}function H(){return j()&&A.in_grouping_b(S,105,305)&&Z(function(){return A.eq_s_b(1,"s")})}function I(){return A.find_among_b(t,2)}function J(){return j()&&A.find_among_b(o,4)&&B()}function K(){return j()&&A.find_among_b(l,4)}function M(){return j()&&A.find_among_b(a,2)}function N(){return j()&&A.find_among_b(w,4)&&D()}function O(){return j()&&A.find_among_b(_,4)}function Q(){return j()&&A.find_among_b(k,4)&&D()}function R(){return A.find_among_b(p,4)}function U(){return j()&&A.find_among_b(g,2)}function V(){return j()&&A.find_among_b(z,8)}function X(){return j()&&A.find_among_b(v,32)&&D()}function Y(){return A.find_among_b(q,8)&&D()}function $(){return j()&&A.find_among_b(C,4)&&D()}function rr(){var r=A.limit-A.cursor;return!($()||(A.cursor=A.limit-r,X()||(A.cursor=A.limit-r,Y()||(A.cursor=A.limit-r,A.eq_s_b(3,"ken")&&D()))))}function ir(){if(A.find_among_b(h,2)){var r=A.limit-A.cursor;if(R()||(A.cursor=A.limit-r,U()||(A.cursor=A.limit-r,N()||(A.cursor=A.limit-r,O()||(A.cursor=A.limit-r,Q()||(A.cursor=A.limit-r))))),$())return!1}return!0}function er(){if(!j()||!A.find_among_b(y,4))return!0;var r=A.limit-A.cursor;return!X()&&(A.cursor=A.limit-r,!Y())}function nr(){var i,e,n,t=A.limit-A.cursor;if(A.ket=A.cursor,r=!0,rr()&&(A.cursor=A.limit-t,ir()&&(A.cursor=A.limit-t,function(){if(U()){A.bra=A.cursor,A.slice_del();var i=A.limit-A.cursor;return A.ket=A.cursor,V()||(A.cursor=A.limit-i,X()||(A.cursor=A.limit-i,Y()||(A.cursor=A.limit-i,$()||(A.cursor=A.limit-i)))),r=!1,!1}return!0}()&&(A.cursor=A.limit-t,er()&&(A.cursor=A.limit-t,n=A.limit-A.cursor,!(R()||(A.cursor=A.limit-n,Q()||(A.cursor=A.limit-n,O()||(A.cursor=A.limit-n,N()))))||(A.bra=A.cursor,A.slice_del(),e=A.limit-A.cursor,A.ket=A.cursor,$()||(A.cursor=A.limit-e),0)))))){if(A.cursor=A.limit-t,!V())return;A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,i=A.limit-A.cursor,R()||(A.cursor=A.limit-i,U()||(A.cursor=A.limit-i,N()||(A.cursor=A.limit-i,O()||(A.cursor=A.limit-i,Q()||(A.cursor=A.limit-i))))),$()||(A.cursor=A.limit-i)}A.bra=A.cursor,A.slice_del()}function tr(){var r,i,e,n;if(A.ket=A.cursor,A.eq_s_b(2,"ki")){if(r=A.limit-A.cursor,K())return 
A.bra=A.cursor,A.slice_del(),i=A.limit-A.cursor,A.ket=A.cursor,U()?(A.bra=A.cursor,A.slice_del(),tr()):(A.cursor=A.limit-i,G()&&(A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr()))),!0;if(A.cursor=A.limit-r,J()){if(A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,e=A.limit-A.cursor,I())A.bra=A.cursor,A.slice_del();else{if(A.cursor=A.limit-e,A.ket=A.cursor,!G()&&(A.cursor=A.limit-e,!H()&&(A.cursor=A.limit-e,!tr())))return!0;A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr())}return!0}if(A.cursor=A.limit-r,M()){if(n=A.limit-A.cursor,I())A.bra=A.cursor,A.slice_del();else if(A.cursor=A.limit-n,H())A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr());else if(A.cursor=A.limit-n,!tr())return!1;return!0}}return!1}function ur(r){if(A.ket=A.cursor,!M()&&(A.cursor=A.limit-r,!j()||!A.find_among_b(c,2)))return!1;var i=A.limit-A.cursor;if(I())A.bra=A.cursor,A.slice_del();else if(A.cursor=A.limit-i,H())A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr());else if(A.cursor=A.limit-i,!tr())return!1;return!0}function or(r){if(A.ket=A.cursor,!(j()&&A.find_among_b(d,2)||(A.cursor=A.limit-r,j()&&A.find_among_b(u,4))))return!1;var i=A.limit-A.cursor;return!(!H()&&(A.cursor=A.limit-i,!I()))&&(A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr()),!0)}function sr(){var r,i=A.limit-A.cursor;return A.ket=A.cursor,!!(J()||(A.cursor=A.limit-i,j()&&A.find_among_b(f,2)&&D()))&&(A.bra=A.cursor,A.slice_del(),r=A.limit-A.cursor,A.ket=A.cursor,!(!U()||(A.bra=A.cursor,A.slice_del(),!tr()))||(A.cursor=A.limit-r,A.ket=A.cursor,!(G()||(A.cursor=A.limit-r,H()||(A.cursor=A.limit-r,tr())))||(A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr()),!0)))}function cr(){var r,i,e=A.limit-A.cursor;if(A.ket=A.cursor,!(K()||(A.cursor=A.limit-e,j()&&A.in_grouping_b(S,105,305)&&D()||(A.cursor=A.limit-e,j()&&A.find_among_b(s,2)&&D()))))return!1;if(A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,r=A.limit-A.cursor,G())A.bra=A.cursor,A.slice_del(),i=A.limit-A.cursor,A.ket=A.cursor,U()||(A.cursor=A.limit-i);else if(A.cursor=A.limit-r,!U())return!0;return A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,tr(),!0}function lr(){var r,i,e=A.limit-A.cursor;if(A.ket=A.cursor,U())return A.bra=A.cursor,A.slice_del(),void tr();if(A.cursor=A.limit-e,A.ket=A.cursor,j()&&A.find_among_b(b,2)&&B())if(A.bra=A.cursor,A.slice_del(),r=A.limit-A.cursor,A.ket=A.cursor,I())A.bra=A.cursor,A.slice_del();else{if(A.cursor=A.limit-r,A.ket=A.cursor,!G()&&(A.cursor=A.limit-r,!H())){if(A.cursor=A.limit-r,A.ket=A.cursor,!U())return;if(A.bra=A.cursor,A.slice_del(),!tr())return}A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr())}else if(A.cursor=A.limit-e,!ur(e)&&(A.cursor=A.limit-e,!or(e))){if(A.cursor=A.limit-e,A.ket=A.cursor,j()&&A.find_among_b(m,4))return A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,i=A.limit-A.cursor,void(G()?(A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr())):(A.cursor=A.limit-i,U()?(A.bra=A.cursor,A.slice_del(),tr()):(A.cursor=A.limit-i,tr())));if(A.cursor=A.limit-e,!sr()){if(A.cursor=A.limit-e,I())return A.bra=A.cursor,void A.slice_del();A.cursor=A.limit-e,tr()||(A.cursor=A.limit-e,cr()||(A.cursor=A.limit-e,A.ket=A.cursor,(G()||(A.cursor=A.limit-e,H()))&&(A.bra=A.cursor,A.slice_del(),A.ket=A.cursor,U()&&(A.bra=A.cursor,A.slice_del(),tr()))))}}}function ar(r,i,e){if(A.cursor=A.limit-r,function(){for(;;){var 
r=A.limit-A.cursor;if(A.in_grouping_b(F,97,305)){A.cursor=A.limit-r;break}if(A.cursor=A.limit-r,A.cursor<=A.limit_backward)return!1;A.cursor--}return!0}()){var n=A.limit-A.cursor;if(!A.eq_s_b(1,i)&&(A.cursor=A.limit-n,!A.eq_s_b(1,e)))return!0;A.cursor=A.limit-r;var t=A.cursor;return A.insert(A.cursor,A.cursor,e),A.cursor=t,!1}return!0}function mr(r,i,e){for(;!A.eq_s(i,e);){if(A.cursor>=A.limit)return!0;A.cursor++}return i!=A.limit||(A.cursor=r,!1)}function dr(){var r,i,e=A.cursor;return!(!mr(r=A.cursor,2,"ad")||(A.cursor=r,!mr(r,5,"soyad")))&&(A.limit_backward=e,A.cursor=A.limit,i=A.limit-A.cursor,(A.eq_s_b(1,"d")||(A.cursor=A.limit-i,A.eq_s_b(1,"g")))&&ar(i,"a","ı")&&ar(i,"e","i")&&ar(i,"o","u")&&ar(i,"ö","ü"),A.cursor=A.limit,function(){var r;if(A.ket=A.cursor,r=A.find_among_b(P,4))switch(A.bra=A.cursor,r){case 1:A.slice_from("p");break;case 2:A.slice_from("ç");break;case 3:A.slice_from("t");break;case 4:A.slice_from("k")}}(),!0)}this.setCurrent=function(r){A.setCurrent(r)},this.getCurrent=function(){return A.getCurrent()},this.stem=function(){return!!(function(){for(var r,i=A.cursor,e=2;;){for(r=A.cursor;!A.in_grouping(F,97,305);){if(A.cursor>=A.limit)return A.cursor=r,!(e>0||(A.cursor=i,0));A.cursor++}e--}}()&&(A.limit_backward=A.cursor,A.cursor=A.limit,nr(),A.cursor=A.limit,r&&(lr(),A.cursor=A.limit_backward,dr())))}},function(r){return"function"==typeof r.update?r.update(function(r){return n.setCurrent(r),n.stem(),n.getCurrent()}):(n.setCurrent(r),n.stem(),n.getCurrent())}),r.Pipeline.registerFunction(r.tr.stemmer,"stemmer-tr"),r.tr.stopWordFilter=r.generateStopWordFilter("acaba altmış altı ama ancak arada aslında ayrıca bana bazı belki ben benden beni benim beri beş bile bin bir biri birkaç birkez birçok birşey birşeyi biz bizden bize bizi bizim bu buna bunda bundan bunlar bunları bunların bunu bunun burada böyle böylece da daha dahi de defa değil diye diğer doksan dokuz dolayı dolayısıyla dört edecek eden ederek edilecek ediliyor edilmesi ediyor elli en etmesi etti ettiği ettiğini eğer gibi göre halen hangi hatta hem henüz hep hepsi her herhangi herkesin hiç hiçbir iki ile ilgili ise itibaren itibariyle için işte kadar karşın katrilyon kendi kendilerine kendini kendisi kendisine kendisini kez ki kim kimden kime kimi kimse kırk milyar milyon mu mü mı nasıl ne neden nedenle nerde nerede nereye niye niçin o olan olarak oldu olduklarını olduğu olduğunu olmadı olmadığı olmak olması olmayan olmaz olsa olsun olup olur olursa oluyor on ona ondan onlar onlardan onları onların onu onun otuz oysa pek rağmen sadece sanki sekiz seksen sen senden seni senin siz sizden sizi sizin tarafından trilyon tüm var vardı ve veya ya yani yapacak yapmak yaptı yaptıkları yaptığı yaptığını yapılan yapılması yapıyor yedi yerine yetmiş yine yirmi yoksa yüz zaten çok çünkü öyle üzere üç şey şeyden şeyi şeyler şu şuna şunda şundan şunları şunu şöyle".split(" ")),r.Pipeline.registerFunction(r.tr.stopWordFilter,"stopWordFilter-tr")}}); \ No newline at end of file diff --git a/docs/assets/javascripts/lunr/tinyseg.js b/docs/assets/javascripts/lunr/tinyseg.js new file mode 100644 index 000000000..f7ec60326 --- /dev/null +++ b/docs/assets/javascripts/lunr/tinyseg.js @@ -0,0 +1 @@ +!function(_,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(_.lunr)}(this,function(){return function(_){function t(){var _={"[一二三四五六七八九十百千万億兆]":"M","[一-龠々〆ヵヶ]":"H","[ぁ-ん]":"I","[ァ-ヴーア-ン゙ー]":"K","[a-zA-Za-zA-Z]":"A","[0-90-9]":"N"};for(var t in this.chartype_=[],_){var H=new 
\ No newline at end of file
diff --git a/docs/assets/javascripts/modernizr.1aa3b519.js b/docs/assets/javascripts/modernizr.1aa3b519.js
new file mode 100644
index 000000000..14e111fc3
--- /dev/null
+++ b/docs/assets/javascripts/modernizr.1aa3b519.js
@@ -0,0 +1 @@
+[minified Modernizr 3.5.0 feature-detection bundle omitted]
[new file docs/assets/stylesheets/application-palette.6079476c.css: minified color-palette stylesheet for the documentation theme, contents omitted]
\ No newline at end of file
diff --git a/docs/assets/stylesheets/application.78aab2dc.css b/docs/assets/stylesheets/application.78aab2dc.css
new file mode 100644
index 000000000..541bf4518
---
/dev/null +++ b/docs/assets/stylesheets/application.78aab2dc.css @@ -0,0 +1,2 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}html{-webkit-text-size-adjust:none;-moz-text-size-adjust:none;-ms-text-size-adjust:none;text-size-adjust:none}body{margin:0}hr{overflow:visible;box-sizing:content-box}a{-webkit-text-decoration-skip:objects}a,button,input,label{-webkit-tap-highlight-color:transparent}a{color:inherit;text-decoration:none}small,sub,sup{font-size:80%}sub,sup{position:relative;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}img{border-style:none}table{border-collapse:separate;border-spacing:0}td,th{font-weight:400;vertical-align:top}button{margin:0;padding:0;border:0;outline-style:none;background:transparent;font-size:inherit}input{border:0;outline:0}.md-clipboard:before,.md-icon,.md-nav__button,.md-nav__link:after,.md-nav__title:before,.md-search-result__article--document:before,.md-source-file:before,.md-typeset .admonition>.admonition-title:before,.md-typeset .admonition>summary:before,.md-typeset .critic.comment:before,.md-typeset .footnote-backref,.md-typeset .task-list-control .task-list-indicator:before,.md-typeset details>.admonition-title:before,.md-typeset details>summary:before,.md-typeset summary:after{font-family:Material Icons;font-style:normal;font-variant:normal;font-weight:400;line-height:1;text-transform:none;white-space:nowrap;speak:none;word-wrap:normal;direction:ltr}.md-content__icon,.md-footer-nav__button,.md-header-nav__button,.md-nav__button,.md-nav__title:before,.md-search-result__article--document:before{display:inline-block;margin:.4rem;padding:.8rem;font-size:2.4rem;cursor:pointer}.md-icon--arrow-back:before{content:"\E5C4"}.md-icon--arrow-forward:before{content:"\E5C8"}.md-icon--menu:before{content:"\E5D2"}.md-icon--search:before{content:"\E8B6"}[dir=rtl] .md-icon--arrow-back:before{content:"\E5C8"}[dir=rtl] .md-icon--arrow-forward:before{content:"\E5C4"}body{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}body,input{color:rgba(0,0,0,.87);-webkit-font-feature-settings:"kern","liga";font-feature-settings:"kern","liga";font-family:Helvetica Neue,Helvetica,Arial,sans-serif}code,kbd,pre{color:rgba(0,0,0,.87);-webkit-font-feature-settings:"kern";font-feature-settings:"kern";font-family:Courier New,Courier,monospace}.md-typeset{font-size:1.6rem;line-height:1.6;-webkit-print-color-adjust:exact}.md-typeset blockquote,.md-typeset ol,.md-typeset p,.md-typeset ul{margin:1em 0}.md-typeset h1{margin:0 0 4rem;color:rgba(0,0,0,.54);font-size:3.125rem;line-height:1.3}.md-typeset h1,.md-typeset h2{font-weight:300;letter-spacing:-.01em}.md-typeset h2{margin:4rem 0 1.6rem;font-size:2.5rem;line-height:1.4}.md-typeset h3{margin:3.2rem 0 1.6rem;font-size:2rem;font-weight:400;letter-spacing:-.01em;line-height:1.5}.md-typeset h2+h3{margin-top:1.6rem}.md-typeset h4{font-size:1.6rem}.md-typeset h4,.md-typeset h5,.md-typeset h6{margin:1.6rem 0;font-weight:700;letter-spacing:-.01em}.md-typeset h5,.md-typeset h6{color:rgba(0,0,0,.54);font-size:1.28rem}.md-typeset h5{text-transform:uppercase}.md-typeset hr{margin:1.5em 0;border-bottom:.1rem dotted rgba(0,0,0,.26)}.md-typeset a{color:#3f51b5;word-break:break-word}.md-typeset a,.md-typeset a:before{transition:color .125s}.md-typeset a:active,.md-typeset a:hover{color:#536dfe}.md-typeset code,.md-typeset pre{background-color:hsla(0,0%,93%,.5);color:#37474f;font-size:85%;direction:ltr}.md-typeset code{margin:0 .29412em;padding:.07353em 0;border-radius:.2rem;box-shadow:.29412em 0 0 
hsla(0,0%,93%,.5),-.29412em 0 0 hsla(0,0%,93%,.5);word-break:break-word;-webkit-box-decoration-break:clone;box-decoration-break:clone}.md-typeset h1 code,.md-typeset h2 code,.md-typeset h3 code,.md-typeset h4 code,.md-typeset h5 code,.md-typeset h6 code{margin:0;background-color:transparent;box-shadow:none}.md-typeset a>code{margin:inherit;padding:inherit;border-radius:none;background-color:inherit;color:inherit;box-shadow:none}.md-typeset pre{position:relative;margin:1em 0;border-radius:.2rem;line-height:1.4;-webkit-overflow-scrolling:touch}.md-typeset pre>code{display:block;margin:0;padding:1.05rem 1.2rem;background-color:transparent;font-size:inherit;box-shadow:none;-webkit-box-decoration-break:none;box-decoration-break:none;overflow:auto}.md-typeset pre>code::-webkit-scrollbar{width:.4rem;height:.4rem}.md-typeset pre>code::-webkit-scrollbar-thumb{background-color:rgba(0,0,0,.26)}.md-typeset pre>code::-webkit-scrollbar-thumb:hover{background-color:#536dfe}.md-typeset kbd{padding:0 .29412em;border:.1rem solid #c9c9c9;border-radius:.3rem;border-bottom-color:#bcbcbc;background-color:#fcfcfc;color:#555;font-size:85%;box-shadow:0 .1rem 0 #b0b0b0;word-break:break-word}.md-typeset mark{margin:0 .25em;padding:.0625em 0;border-radius:.2rem;background-color:rgba(255,235,59,.5);box-shadow:.25em 0 0 rgba(255,235,59,.5),-.25em 0 0 rgba(255,235,59,.5);word-break:break-word;-webkit-box-decoration-break:clone;box-decoration-break:clone}.md-typeset abbr{border-bottom:.1rem dotted rgba(0,0,0,.54);text-decoration:none;cursor:help}.md-typeset small{opacity:.75}.md-typeset sub,.md-typeset sup{margin-left:.07812em}[dir=rtl] .md-typeset sub,[dir=rtl] .md-typeset sup{margin-right:.07812em;margin-left:0}.md-typeset blockquote{padding-left:1.2rem;border-left:.4rem solid rgba(0,0,0,.26);color:rgba(0,0,0,.54)}[dir=rtl] .md-typeset blockquote{padding-right:1.2rem;padding-left:0;border-right:.4rem solid rgba(0,0,0,.26);border-left:initial}.md-typeset ul{list-style-type:disc}.md-typeset ol,.md-typeset ul{margin-left:.625em;padding:0}[dir=rtl] .md-typeset ol,[dir=rtl] .md-typeset ul{margin-right:.625em;margin-left:0}.md-typeset ol ol,.md-typeset ul ol{list-style-type:lower-alpha}.md-typeset ol ol ol,.md-typeset ul ol ol{list-style-type:lower-roman}.md-typeset ol li,.md-typeset ul li{margin-bottom:.5em;margin-left:1.25em}[dir=rtl] .md-typeset ol li,[dir=rtl] .md-typeset ul li{margin-right:1.25em;margin-left:0}.md-typeset ol li blockquote,.md-typeset ol li p,.md-typeset ul li blockquote,.md-typeset ul li p{margin:.5em 0}.md-typeset ol li:last-child,.md-typeset ul li:last-child{margin-bottom:0}.md-typeset ol li ol,.md-typeset ol li ul,.md-typeset ul li ol,.md-typeset ul li ul{margin:.5em 0 .5em .625em}[dir=rtl] .md-typeset ol li ol,[dir=rtl] .md-typeset ol li ul,[dir=rtl] .md-typeset ul li ol,[dir=rtl] .md-typeset ul li ul{margin-right:.625em;margin-left:0}.md-typeset dd{margin:1em 0 1em 1.875em}[dir=rtl] .md-typeset dd{margin-right:1.875em;margin-left:0}.md-typeset iframe,.md-typeset img,.md-typeset svg{max-width:100%}.md-typeset table:not([class]){box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 1px 5px 0 rgba(0,0,0,.12),0 3px 1px -2px rgba(0,0,0,.2);display:inline-block;max-width:100%;border-radius:.2rem;font-size:1.28rem;overflow:auto;-webkit-overflow-scrolling:touch}.md-typeset table:not([class])+*{margin-top:1.5em}.md-typeset table:not([class]) td:not([align]),.md-typeset table:not([class]) th:not([align]){text-align:left}[dir=rtl] .md-typeset table:not([class]) td:not([align]),[dir=rtl] .md-typeset table:not([class]) 
th:not([align]){text-align:right}.md-typeset table:not([class]) th{min-width:10rem;padding:1.2rem 1.6rem;background-color:rgba(0,0,0,.54);color:#fff;vertical-align:top}.md-typeset table:not([class]) td{padding:1.2rem 1.6rem;border-top:.1rem solid rgba(0,0,0,.07);vertical-align:top}.md-typeset table:not([class]) tr:first-child td{border-top:0}.md-typeset table:not([class]) a{word-break:normal}.md-typeset__scrollwrap{margin:1em -1.6rem;overflow-x:auto;-webkit-overflow-scrolling:touch}.md-typeset .md-typeset__table{display:inline-block;margin-bottom:.5em;padding:0 1.6rem}.md-typeset .md-typeset__table table{display:table;width:100%;margin:0;overflow:hidden}html{font-size:62.5%;overflow-x:hidden}body,html{height:100%}body{position:relative}hr{display:block;height:.1rem;padding:0;border:0}.md-svg{display:none}.md-grid{max-width:122rem;margin-right:auto;margin-left:auto}.md-container,.md-main{overflow:auto}.md-container{display:table;width:100%;height:100%;padding-top:4.8rem;table-layout:fixed}.md-main{display:table-row;height:100%}.md-main__inner{height:100%;padding-top:3rem;padding-bottom:.1rem}.md-toggle{display:none}.md-overlay{position:fixed;top:0;width:0;height:0;transition:width 0s .25s,height 0s .25s,opacity .25s;background-color:rgba(0,0,0,.54);opacity:0;z-index:3}.md-flex{display:table}.md-flex__cell{display:table-cell;position:relative;vertical-align:top}.md-flex__cell--shrink{width:0}.md-flex__cell--stretch{display:table;width:100%;table-layout:fixed}.md-flex__ellipsis{display:table-cell;text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.md-skip{position:fixed;width:.1rem;height:.1rem;margin:1rem;padding:.6rem 1rem;clip:rect(.1rem);-webkit-transform:translateY(.8rem);transform:translateY(.8rem);border-radius:.2rem;background-color:rgba(0,0,0,.87);color:#fff;font-size:1.28rem;opacity:0;overflow:hidden}.md-skip:focus{width:auto;height:auto;clip:auto;-webkit-transform:translateX(0);transform:translateX(0);transition:opacity .175s 75ms,-webkit-transform .25s cubic-bezier(.4,0,.2,1);transition:transform .25s cubic-bezier(.4,0,.2,1),opacity .175s 75ms;transition:transform .25s cubic-bezier(.4,0,.2,1),opacity .175s 75ms,-webkit-transform .25s cubic-bezier(.4,0,.2,1);opacity:1;z-index:10}@page{margin:25mm}.md-clipboard{position:absolute;top:.6rem;right:.6rem;width:2.8rem;height:2.8rem;border-radius:.2rem;font-size:1.6rem;cursor:pointer;z-index:1;-webkit-backface-visibility:hidden;backface-visibility:hidden}.md-clipboard:before{transition:color .25s,opacity .25s;color:rgba(0,0,0,.07);content:"\E14D"}.codehilite:hover .md-clipboard:before,.md-typeset .highlight:hover .md-clipboard:before,pre:hover .md-clipboard:before{color:rgba(0,0,0,.54)}.md-clipboard:focus:before,.md-clipboard:hover:before{color:#536dfe}.md-clipboard__message{display:block;position:absolute;top:0;right:3.4rem;padding:.6rem 1rem;-webkit-transform:translateX(.8rem);transform:translateX(.8rem);transition:opacity .175s,-webkit-transform .25s cubic-bezier(.9,.1,.9,0);transition:transform .25s cubic-bezier(.9,.1,.9,0),opacity .175s;transition:transform .25s cubic-bezier(.9,.1,.9,0),opacity .175s,-webkit-transform .25s cubic-bezier(.9,.1,.9,0);border-radius:.2rem;background-color:rgba(0,0,0,.54);color:#fff;font-size:1.28rem;white-space:nowrap;opacity:0;pointer-events:none}.md-clipboard__message--active{-webkit-transform:translateX(0);transform:translateX(0);transition:opacity .175s 75ms,-webkit-transform .25s cubic-bezier(.4,0,.2,1);transition:transform .25s cubic-bezier(.4,0,.2,1),opacity .175s 75ms;transition:transform 
.25s cubic-bezier(.4,0,.2,1),opacity .175s 75ms,-webkit-transform .25s cubic-bezier(.4,0,.2,1);opacity:1;pointer-events:auto}.md-clipboard__message:before{content:attr(aria-label)}.md-clipboard__message:after{display:block;position:absolute;top:50%;right:-.4rem;width:0;margin-top:-.4rem;border-width:.4rem 0 .4rem .4rem;border-style:solid;border-color:transparent rgba(0,0,0,.54);content:""}.md-content__inner{margin:0 1.6rem 2.4rem;padding-top:1.2rem}.md-content__inner:before{display:block;height:.8rem;content:""}.md-content__inner>:last-child{margin-bottom:0}.md-content__icon{position:relative;margin:.8rem 0;padding:0;float:right}.md-typeset .md-content__icon{color:rgba(0,0,0,.26)}.md-header{position:fixed;top:0;right:0;left:0;height:4.8rem;transition:background-color .25s,color .25s;background-color:#3f51b5;color:#fff;box-shadow:none;z-index:2;-webkit-backface-visibility:hidden;backface-visibility:hidden}.no-js .md-header{transition:none;box-shadow:none}.md-header[data-md-state=shadow]{transition:background-color .25s,color .25s,box-shadow .25s;box-shadow:0 0 .4rem rgba(0,0,0,.1),0 .4rem .8rem rgba(0,0,0,.2)}.md-header-nav{padding:0 .4rem}.md-header-nav__button{position:relative;transition:opacity .25s;z-index:1}.md-header-nav__button:hover{opacity:.7}.md-header-nav__button.md-logo *{display:block}.no-js .md-header-nav__button.md-icon--search{display:none}.md-header-nav__topic{display:block;position:absolute;transition:opacity .15s,-webkit-transform .4s cubic-bezier(.1,.7,.1,1);transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s;transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s,-webkit-transform .4s cubic-bezier(.1,.7,.1,1);text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.md-header-nav__topic+.md-header-nav__topic{-webkit-transform:translateX(2.5rem);transform:translateX(2.5rem);transition:opacity .15s,-webkit-transform .4s cubic-bezier(1,.7,.1,.1);transition:transform .4s cubic-bezier(1,.7,.1,.1),opacity .15s;transition:transform .4s cubic-bezier(1,.7,.1,.1),opacity .15s,-webkit-transform .4s cubic-bezier(1,.7,.1,.1);opacity:0;z-index:-1;pointer-events:none}[dir=rtl] .md-header-nav__topic+.md-header-nav__topic{-webkit-transform:translateX(-2.5rem);transform:translateX(-2.5rem)}.no-js .md-header-nav__topic{position:static}.no-js .md-header-nav__topic+.md-header-nav__topic{display:none}.md-header-nav__title{padding:0 2rem;font-size:1.8rem;line-height:4.8rem}.md-header-nav__title[data-md-state=active] .md-header-nav__topic{-webkit-transform:translateX(-2.5rem);transform:translateX(-2.5rem);transition:opacity .15s,-webkit-transform .4s cubic-bezier(1,.7,.1,.1);transition:transform .4s cubic-bezier(1,.7,.1,.1),opacity .15s;transition:transform .4s cubic-bezier(1,.7,.1,.1),opacity .15s,-webkit-transform .4s cubic-bezier(1,.7,.1,.1);opacity:0;z-index:-1;pointer-events:none}[dir=rtl] .md-header-nav__title[data-md-state=active] .md-header-nav__topic{-webkit-transform:translateX(2.5rem);transform:translateX(2.5rem)}.md-header-nav__title[data-md-state=active] .md-header-nav__topic+.md-header-nav__topic{-webkit-transform:translateX(0);transform:translateX(0);transition:opacity .15s,-webkit-transform .4s cubic-bezier(.1,.7,.1,1);transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s;transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s,-webkit-transform .4s cubic-bezier(.1,.7,.1,1);opacity:1;z-index:0;pointer-events:auto}.md-header-nav__source{display:none}.md-hero{transition:background 
.25s;background-color:#3f51b5;color:#fff;font-size:2rem;overflow:hidden}.md-hero__inner{margin-top:2rem;padding:1.6rem 1.6rem .8rem;transition:opacity .25s,-webkit-transform .4s cubic-bezier(.1,.7,.1,1);transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .25s;transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .25s,-webkit-transform .4s cubic-bezier(.1,.7,.1,1);transition-delay:.1s}[data-md-state=hidden] .md-hero__inner{pointer-events:none;-webkit-transform:translateY(1.25rem);transform:translateY(1.25rem);transition:opacity .1s 0s,-webkit-transform 0s .4s;transition:transform 0s .4s,opacity .1s 0s;transition:transform 0s .4s,opacity .1s 0s,-webkit-transform 0s .4s;opacity:0}.md-hero--expand .md-hero__inner{margin-bottom:2.4rem}.md-footer-nav{background-color:rgba(0,0,0,.87);color:#fff}.md-footer-nav__inner{padding:.4rem;overflow:auto}.md-footer-nav__link{padding-top:2.8rem;padding-bottom:.8rem;transition:opacity .25s}.md-footer-nav__link:hover{opacity:.7}.md-footer-nav__link--prev{width:25%;float:left}[dir=rtl] .md-footer-nav__link--prev{float:right}.md-footer-nav__link--next{width:75%;float:right;text-align:right}[dir=rtl] .md-footer-nav__link--next{float:left;text-align:left}.md-footer-nav__button{transition:background .25s}.md-footer-nav__title{position:relative;padding:0 2rem;font-size:1.8rem;line-height:4.8rem}.md-footer-nav__direction{position:absolute;right:0;left:0;margin-top:-2rem;padding:0 2rem;color:hsla(0,0%,100%,.7);font-size:1.5rem}.md-footer-meta{background-color:rgba(0,0,0,.895)}.md-footer-meta__inner{padding:.4rem;overflow:auto}html .md-footer-meta.md-typeset a{color:hsla(0,0%,100%,.7)}html .md-footer-meta.md-typeset a:focus,html .md-footer-meta.md-typeset a:hover{color:#fff}.md-footer-copyright{margin:0 1.2rem;padding:.8rem 0;color:hsla(0,0%,100%,.3);font-size:1.28rem}.md-footer-copyright__highlight{color:hsla(0,0%,100%,.7)}.md-footer-social{margin:0 .8rem;padding:.4rem 0 1.2rem}.md-footer-social__link{display:inline-block;width:3.2rem;height:3.2rem;font-size:1.6rem;text-align:center}.md-footer-social__link:before{line-height:1.9}.md-nav{font-size:1.4rem;line-height:1.3}.md-nav__title{display:block;padding:0 1.2rem;font-weight:700;text-overflow:ellipsis;overflow:hidden}.md-nav__title:before{display:none;content:"\E5C4"}[dir=rtl] .md-nav__title:before{content:"\E5C8"}.md-nav__title .md-nav__button{display:none}.md-nav__list{margin:0;padding:0;list-style:none}.md-nav__item{padding:0 1.2rem}.md-nav__item:last-child{padding-bottom:1.2rem}.md-nav__item .md-nav__item{padding-right:0}[dir=rtl] .md-nav__item .md-nav__item{padding-right:1.2rem;padding-left:0}.md-nav__item .md-nav__item:last-child{padding-bottom:0}.md-nav__button img{width:100%;height:auto}.md-nav__link{display:block;margin-top:.625em;transition:color .125s;text-overflow:ellipsis;cursor:pointer;overflow:hidden}.md-nav__item--nested>.md-nav__link:after{content:"\E313"}html .md-nav__link[for=toc],html .md-nav__link[for=toc]+.md-nav__link:after,html .md-nav__link[for=toc]~.md-nav{display:none}.md-nav__link[data-md-state=blur]{color:rgba(0,0,0,.54)}.md-nav__link--active,.md-nav__link:active{color:#3f51b5}.md-nav__item--nested>.md-nav__link{color:inherit}.md-nav__link:focus,.md-nav__link:hover{color:#536dfe}.md-nav__source,.no-js .md-search{display:none}.md-search__overlay{opacity:0;z-index:1}.md-search__form{position:relative}.md-search__input{position:relative;padding:0 4.4rem 0 7.2rem;text-overflow:ellipsis;z-index:2}[dir=rtl] .md-search__input{padding:0 7.2rem 0 
4.4rem}.md-search__input::-webkit-input-placeholder{transition:color .25s cubic-bezier(.1,.7,.1,1)}.md-search__input:-ms-input-placeholder,.md-search__input::-ms-input-placeholder{transition:color .25s cubic-bezier(.1,.7,.1,1)}.md-search__input::placeholder{transition:color .25s cubic-bezier(.1,.7,.1,1)}.md-search__input::-webkit-input-placeholder,.md-search__input~.md-search__icon{color:rgba(0,0,0,.54)}.md-search__input:-ms-input-placeholder,.md-search__input::-ms-input-placeholder,.md-search__input~.md-search__icon{color:rgba(0,0,0,.54)}.md-search__input::placeholder,.md-search__input~.md-search__icon{color:rgba(0,0,0,.54)}.md-search__input::-ms-clear{display:none}.md-search__icon{position:absolute;transition:color .25s cubic-bezier(.1,.7,.1,1),opacity .25s;font-size:2.4rem;cursor:pointer;z-index:2}.md-search__icon:hover{opacity:.7}.md-search__icon[for=search]{top:.6rem;left:1rem}[dir=rtl] .md-search__icon[for=search]{right:1rem;left:auto}.md-search__icon[for=search]:before{content:"\E8B6"}.md-search__icon[type=reset]{top:.6rem;right:1rem;-webkit-transform:scale(.125);transform:scale(.125);transition:opacity .15s,-webkit-transform .15s cubic-bezier(.1,.7,.1,1);transition:transform .15s cubic-bezier(.1,.7,.1,1),opacity .15s;transition:transform .15s cubic-bezier(.1,.7,.1,1),opacity .15s,-webkit-transform .15s cubic-bezier(.1,.7,.1,1);opacity:0}[dir=rtl] .md-search__icon[type=reset]{right:auto;left:1rem}[data-md-toggle=search]:checked~.md-header .md-search__input:valid~.md-search__icon[type=reset]{-webkit-transform:scale(1);transform:scale(1);opacity:1}[data-md-toggle=search]:checked~.md-header .md-search__input:valid~.md-search__icon[type=reset]:hover{opacity:.7}.md-search__output{position:absolute;width:100%;border-radius:0 0 .2rem .2rem;overflow:hidden;z-index:1}.md-search__scrollwrap{height:100%;background-color:#fff;box-shadow:inset 0 .1rem 0 rgba(0,0,0,.07);overflow-y:auto;-webkit-overflow-scrolling:touch}.md-search-result{color:rgba(0,0,0,.87);word-break:break-word}.md-search-result__meta{padding:0 1.6rem;background-color:rgba(0,0,0,.07);color:rgba(0,0,0,.54);font-size:1.28rem;line-height:3.6rem}.md-search-result__list{margin:0;padding:0;border-top:.1rem solid rgba(0,0,0,.07);list-style:none}.md-search-result__item{box-shadow:0 -.1rem 0 rgba(0,0,0,.07)}.md-search-result__link{display:block;transition:background .25s;outline:0;overflow:hidden}.md-search-result__link:hover,.md-search-result__link[data-md-state=active]{background-color:rgba(83,109,254,.1)}.md-search-result__link:hover .md-search-result__article:before,.md-search-result__link[data-md-state=active] .md-search-result__article:before{opacity:.7}.md-search-result__link:last-child .md-search-result__teaser{margin-bottom:1.2rem}.md-search-result__article{position:relative;padding:0 1.6rem;overflow:auto}.md-search-result__article--document:before{position:absolute;left:0;margin:.2rem;transition:opacity .25s;color:rgba(0,0,0,.54);content:"\E880"}[dir=rtl] .md-search-result__article--document:before{right:0;left:auto}.md-search-result__article--document .md-search-result__title{margin:1.1rem 0;font-size:1.6rem;font-weight:400;line-height:1.4}.md-search-result__title{margin:.5em 0;font-size:1.28rem;font-weight:700;line-height:1.4}.md-search-result__teaser{display:-webkit-box;max-height:3.3rem;margin:.5em 0;color:rgba(0,0,0,.54);font-size:1.28rem;line-height:1.4;text-overflow:ellipsis;overflow:hidden;-webkit-line-clamp:2}.md-search-result 
em{font-style:normal;font-weight:700;text-decoration:underline}.md-sidebar{position:absolute;width:24.2rem;padding:2.4rem 0;overflow:hidden}.md-sidebar[data-md-state=lock]{position:fixed;top:4.8rem}.md-sidebar--secondary{display:none}.md-sidebar__scrollwrap{max-height:100%;margin:0 .4rem;overflow-y:auto;-webkit-backface-visibility:hidden;backface-visibility:hidden}.md-sidebar__scrollwrap::-webkit-scrollbar{width:.4rem;height:.4rem}.md-sidebar__scrollwrap::-webkit-scrollbar-thumb{background-color:rgba(0,0,0,.26)}.md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover{background-color:#536dfe}@-webkit-keyframes md-source__facts--done{0%{height:0}to{height:1.3rem}}@keyframes md-source__facts--done{0%{height:0}to{height:1.3rem}}@-webkit-keyframes md-source__fact--done{0%{-webkit-transform:translateY(100%);transform:translateY(100%);opacity:0}50%{opacity:0}to{-webkit-transform:translateY(0);transform:translateY(0);opacity:1}}@keyframes md-source__fact--done{0%{-webkit-transform:translateY(100%);transform:translateY(100%);opacity:0}50%{opacity:0}to{-webkit-transform:translateY(0);transform:translateY(0);opacity:1}}.md-source{display:block;padding-right:1.2rem;transition:opacity .25s;font-size:1.3rem;line-height:1.2;white-space:nowrap}[dir=rtl] .md-source{padding-right:0;padding-left:1.2rem}.md-source:hover{opacity:.7}.md-source:after,.md-source__icon{display:inline-block;height:4.8rem;content:"";vertical-align:middle}.md-source__icon{width:4.8rem}.md-source__icon svg{width:2.4rem;height:2.4rem;margin-top:1.2rem;margin-left:1.2rem}[dir=rtl] .md-source__icon svg{margin-right:1.2rem;margin-left:0}.md-source__icon+.md-source__repository{margin-left:-4.4rem;padding-left:4rem}[dir=rtl] .md-source__icon+.md-source__repository{margin-right:-4.4rem;margin-left:0;padding-right:4rem;padding-left:0}.md-source__repository{display:inline-block;max-width:100%;margin-left:1.2rem;font-weight:700;text-overflow:ellipsis;overflow:hidden;vertical-align:middle}.md-source__facts{margin:0;padding:0;font-size:1.1rem;font-weight:700;list-style-type:none;opacity:.75;overflow:hidden}[data-md-state=done] .md-source__facts{-webkit-animation:md-source__facts--done .25s ease-in;animation:md-source__facts--done .25s ease-in}.md-source__fact{float:left}[dir=rtl] .md-source__fact{float:right}[data-md-state=done] .md-source__fact{-webkit-animation:md-source__fact--done .4s ease-out;animation:md-source__fact--done .4s ease-out}.md-source__fact:before{margin:0 .2rem;content:"\B7"}.md-source__fact:first-child:before{display:none}.md-source-file{display:inline-block;margin:1em .5em 1em 0;padding-right:.5rem;border-radius:.2rem;background-color:rgba(0,0,0,.07);font-size:1.28rem;list-style-type:none;cursor:pointer;overflow:hidden}.md-source-file:before{display:inline-block;margin-right:.5rem;padding:.5rem;background-color:rgba(0,0,0,.26);color:#fff;font-size:1.6rem;content:"\E86F";vertical-align:middle}html .md-source-file{transition:background .4s,color .4s,box-shadow .4s cubic-bezier(.4,0,.2,1)}html .md-source-file:before{transition:inherit}html body .md-typeset .md-source-file{color:rgba(0,0,0,.54)}.md-source-file:hover{box-shadow:0 0 8px rgba(0,0,0,.18),0 8px 16px rgba(0,0,0,.36)}.md-source-file:hover:before{background-color:#536dfe}.md-tabs{width:100%;transition:background 
.25s;background-color:#3f51b5;color:#fff;overflow:auto}.md-tabs__list{margin:0;margin-left:.4rem;padding:0;list-style:none;white-space:nowrap}.md-tabs__item{display:inline-block;height:4.8rem;padding-right:1.2rem;padding-left:1.2rem}.md-tabs__link{display:block;margin-top:1.6rem;transition:opacity .25s,-webkit-transform .4s cubic-bezier(.1,.7,.1,1);transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .25s;transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .25s,-webkit-transform .4s cubic-bezier(.1,.7,.1,1);font-size:1.4rem;opacity:.7}.md-tabs__link--active,.md-tabs__link:hover{color:inherit;opacity:1}.md-tabs__item:nth-child(2) .md-tabs__link{transition-delay:.02s}.md-tabs__item:nth-child(3) .md-tabs__link{transition-delay:.04s}.md-tabs__item:nth-child(4) .md-tabs__link{transition-delay:.06s}.md-tabs__item:nth-child(5) .md-tabs__link{transition-delay:.08s}.md-tabs__item:nth-child(6) .md-tabs__link{transition-delay:.1s}.md-tabs__item:nth-child(7) .md-tabs__link{transition-delay:.12s}.md-tabs__item:nth-child(8) .md-tabs__link{transition-delay:.14s}.md-tabs__item:nth-child(9) .md-tabs__link{transition-delay:.16s}.md-tabs__item:nth-child(10) .md-tabs__link{transition-delay:.18s}.md-tabs__item:nth-child(11) .md-tabs__link{transition-delay:.2s}.md-tabs__item:nth-child(12) .md-tabs__link{transition-delay:.22s}.md-tabs__item:nth-child(13) .md-tabs__link{transition-delay:.24s}.md-tabs__item:nth-child(14) .md-tabs__link{transition-delay:.26s}.md-tabs__item:nth-child(15) .md-tabs__link{transition-delay:.28s}.md-tabs__item:nth-child(16) .md-tabs__link{transition-delay:.3s}.md-tabs[data-md-state=hidden]{pointer-events:none}.md-tabs[data-md-state=hidden] .md-tabs__link{-webkit-transform:translateY(50%);transform:translateY(50%);transition:color .25s,opacity .1s,-webkit-transform 0s .4s;transition:color .25s,transform 0s .4s,opacity .1s;transition:color .25s,transform 0s .4s,opacity .1s,-webkit-transform 0s .4s;opacity:0}.md-typeset .admonition,.md-typeset details{box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 1px 5px 0 rgba(0,0,0,.12),0 3px 1px -2px rgba(0,0,0,.2);position:relative;margin:1.5625em 0;padding:0 1.2rem;border-left:.4rem solid #448aff;border-radius:.2rem;font-size:1.28rem;overflow:auto}[dir=rtl] .md-typeset .admonition,[dir=rtl] .md-typeset details{border-right:.4rem solid #448aff;border-left:none}html .md-typeset .admonition>:last-child,html .md-typeset details>:last-child{margin-bottom:1.2rem}.md-typeset .admonition .admonition,.md-typeset .admonition details,.md-typeset details .admonition,.md-typeset details details{margin:1em 0}.md-typeset .admonition>.admonition-title,.md-typeset .admonition>summary,.md-typeset details>.admonition-title,.md-typeset details>summary{margin:0 -1.2rem;padding:.8rem 1.2rem .8rem 4rem;border-bottom:.1rem solid rgba(68,138,255,.1);background-color:rgba(68,138,255,.1);font-weight:700}[dir=rtl] .md-typeset .admonition>.admonition-title,[dir=rtl] .md-typeset .admonition>summary,[dir=rtl] .md-typeset details>.admonition-title,[dir=rtl] .md-typeset details>summary{padding:.8rem 4rem .8rem 1.2rem}.md-typeset .admonition>.admonition-title:last-child,.md-typeset .admonition>summary:last-child,.md-typeset details>.admonition-title:last-child,.md-typeset details>summary:last-child{margin-bottom:0}.md-typeset .admonition>.admonition-title:before,.md-typeset .admonition>summary:before,.md-typeset details>.admonition-title:before,.md-typeset details>summary:before{position:absolute;left:1.2rem;color:#448aff;font-size:2rem;content:"\E3C9"}[dir=rtl] .md-typeset 
.admonition>.admonition-title:before,[dir=rtl] .md-typeset .admonition>summary:before,[dir=rtl] .md-typeset details>.admonition-title:before,[dir=rtl] .md-typeset details>summary:before{right:1.2rem;left:auto}.md-typeset .admonition.abstract,.md-typeset .admonition.summary,.md-typeset .admonition.tldr,.md-typeset details.abstract,.md-typeset details.summary,.md-typeset details.tldr{border-left-color:#00b0ff}[dir=rtl] .md-typeset .admonition.abstract,[dir=rtl] .md-typeset .admonition.summary,[dir=rtl] .md-typeset .admonition.tldr,[dir=rtl] .md-typeset details.abstract,[dir=rtl] .md-typeset details.summary,[dir=rtl] .md-typeset details.tldr{border-right-color:#00b0ff}.md-typeset .admonition.abstract>.admonition-title,.md-typeset .admonition.abstract>summary,.md-typeset .admonition.summary>.admonition-title,.md-typeset .admonition.summary>summary,.md-typeset .admonition.tldr>.admonition-title,.md-typeset .admonition.tldr>summary,.md-typeset details.abstract>.admonition-title,.md-typeset details.abstract>summary,.md-typeset details.summary>.admonition-title,.md-typeset details.summary>summary,.md-typeset details.tldr>.admonition-title,.md-typeset details.tldr>summary{border-bottom-color:.1rem solid rgba(0,176,255,.1);background-color:rgba(0,176,255,.1)}.md-typeset .admonition.abstract>.admonition-title:before,.md-typeset .admonition.abstract>summary:before,.md-typeset .admonition.summary>.admonition-title:before,.md-typeset .admonition.summary>summary:before,.md-typeset .admonition.tldr>.admonition-title:before,.md-typeset .admonition.tldr>summary:before,.md-typeset details.abstract>.admonition-title:before,.md-typeset details.abstract>summary:before,.md-typeset details.summary>.admonition-title:before,.md-typeset details.summary>summary:before,.md-typeset details.tldr>.admonition-title:before,.md-typeset details.tldr>summary:before{color:#00b0ff;content:"\E8D2"}.md-typeset .admonition.info,.md-typeset .admonition.todo,.md-typeset details.info,.md-typeset details.todo{border-left-color:#00b8d4}[dir=rtl] .md-typeset .admonition.info,[dir=rtl] .md-typeset .admonition.todo,[dir=rtl] .md-typeset details.info,[dir=rtl] .md-typeset details.todo{border-right-color:#00b8d4}.md-typeset .admonition.info>.admonition-title,.md-typeset .admonition.info>summary,.md-typeset .admonition.todo>.admonition-title,.md-typeset .admonition.todo>summary,.md-typeset details.info>.admonition-title,.md-typeset details.info>summary,.md-typeset details.todo>.admonition-title,.md-typeset details.todo>summary{border-bottom-color:.1rem solid rgba(0,184,212,.1);background-color:rgba(0,184,212,.1)}.md-typeset .admonition.info>.admonition-title:before,.md-typeset .admonition.info>summary:before,.md-typeset .admonition.todo>.admonition-title:before,.md-typeset .admonition.todo>summary:before,.md-typeset details.info>.admonition-title:before,.md-typeset details.info>summary:before,.md-typeset details.todo>.admonition-title:before,.md-typeset details.todo>summary:before{color:#00b8d4;content:"\E88E"}.md-typeset .admonition.hint,.md-typeset .admonition.important,.md-typeset .admonition.tip,.md-typeset details.hint,.md-typeset details.important,.md-typeset details.tip{border-left-color:#00bfa5}[dir=rtl] .md-typeset .admonition.hint,[dir=rtl] .md-typeset .admonition.important,[dir=rtl] .md-typeset .admonition.tip,[dir=rtl] .md-typeset details.hint,[dir=rtl] .md-typeset details.important,[dir=rtl] .md-typeset details.tip{border-right-color:#00bfa5}.md-typeset .admonition.hint>.admonition-title,.md-typeset 
.admonition.hint>summary,.md-typeset .admonition.important>.admonition-title,.md-typeset .admonition.important>summary,.md-typeset .admonition.tip>.admonition-title,.md-typeset .admonition.tip>summary,.md-typeset details.hint>.admonition-title,.md-typeset details.hint>summary,.md-typeset details.important>.admonition-title,.md-typeset details.important>summary,.md-typeset details.tip>.admonition-title,.md-typeset details.tip>summary{border-bottom-color:.1rem solid rgba(0,191,165,.1);background-color:rgba(0,191,165,.1)}.md-typeset .admonition.hint>.admonition-title:before,.md-typeset .admonition.hint>summary:before,.md-typeset .admonition.important>.admonition-title:before,.md-typeset .admonition.important>summary:before,.md-typeset .admonition.tip>.admonition-title:before,.md-typeset .admonition.tip>summary:before,.md-typeset details.hint>.admonition-title:before,.md-typeset details.hint>summary:before,.md-typeset details.important>.admonition-title:before,.md-typeset details.important>summary:before,.md-typeset details.tip>.admonition-title:before,.md-typeset details.tip>summary:before{color:#00bfa5;content:"\E80E"}.md-typeset .admonition.check,.md-typeset .admonition.done,.md-typeset .admonition.success,.md-typeset details.check,.md-typeset details.done,.md-typeset details.success{border-left-color:#00c853}[dir=rtl] .md-typeset .admonition.check,[dir=rtl] .md-typeset .admonition.done,[dir=rtl] .md-typeset .admonition.success,[dir=rtl] .md-typeset details.check,[dir=rtl] .md-typeset details.done,[dir=rtl] .md-typeset details.success{border-right-color:#00c853}.md-typeset .admonition.check>.admonition-title,.md-typeset .admonition.check>summary,.md-typeset .admonition.done>.admonition-title,.md-typeset .admonition.done>summary,.md-typeset .admonition.success>.admonition-title,.md-typeset .admonition.success>summary,.md-typeset details.check>.admonition-title,.md-typeset details.check>summary,.md-typeset details.done>.admonition-title,.md-typeset details.done>summary,.md-typeset details.success>.admonition-title,.md-typeset details.success>summary{border-bottom-color:.1rem solid rgba(0,200,83,.1);background-color:rgba(0,200,83,.1)}.md-typeset .admonition.check>.admonition-title:before,.md-typeset .admonition.check>summary:before,.md-typeset .admonition.done>.admonition-title:before,.md-typeset .admonition.done>summary:before,.md-typeset .admonition.success>.admonition-title:before,.md-typeset .admonition.success>summary:before,.md-typeset details.check>.admonition-title:before,.md-typeset details.check>summary:before,.md-typeset details.done>.admonition-title:before,.md-typeset details.done>summary:before,.md-typeset details.success>.admonition-title:before,.md-typeset details.success>summary:before{color:#00c853;content:"\E876"}.md-typeset .admonition.faq,.md-typeset .admonition.help,.md-typeset .admonition.question,.md-typeset details.faq,.md-typeset details.help,.md-typeset details.question{border-left-color:#64dd17}[dir=rtl] .md-typeset .admonition.faq,[dir=rtl] .md-typeset .admonition.help,[dir=rtl] .md-typeset .admonition.question,[dir=rtl] .md-typeset details.faq,[dir=rtl] .md-typeset details.help,[dir=rtl] .md-typeset details.question{border-right-color:#64dd17}.md-typeset .admonition.faq>.admonition-title,.md-typeset .admonition.faq>summary,.md-typeset .admonition.help>.admonition-title,.md-typeset .admonition.help>summary,.md-typeset .admonition.question>.admonition-title,.md-typeset .admonition.question>summary,.md-typeset details.faq>.admonition-title,.md-typeset 
details.faq>summary,.md-typeset details.help>.admonition-title,.md-typeset details.help>summary,.md-typeset details.question>.admonition-title,.md-typeset details.question>summary{border-bottom-color:.1rem solid rgba(100,221,23,.1);background-color:rgba(100,221,23,.1)}.md-typeset .admonition.faq>.admonition-title:before,.md-typeset .admonition.faq>summary:before,.md-typeset .admonition.help>.admonition-title:before,.md-typeset .admonition.help>summary:before,.md-typeset .admonition.question>.admonition-title:before,.md-typeset .admonition.question>summary:before,.md-typeset details.faq>.admonition-title:before,.md-typeset details.faq>summary:before,.md-typeset details.help>.admonition-title:before,.md-typeset details.help>summary:before,.md-typeset details.question>.admonition-title:before,.md-typeset details.question>summary:before{color:#64dd17;content:"\E887"}.md-typeset .admonition.attention,.md-typeset .admonition.caution,.md-typeset .admonition.warning,.md-typeset details.attention,.md-typeset details.caution,.md-typeset details.warning{border-left-color:#ff9100}[dir=rtl] .md-typeset .admonition.attention,[dir=rtl] .md-typeset .admonition.caution,[dir=rtl] .md-typeset .admonition.warning,[dir=rtl] .md-typeset details.attention,[dir=rtl] .md-typeset details.caution,[dir=rtl] .md-typeset details.warning{border-right-color:#ff9100}.md-typeset .admonition.attention>.admonition-title,.md-typeset .admonition.attention>summary,.md-typeset .admonition.caution>.admonition-title,.md-typeset .admonition.caution>summary,.md-typeset .admonition.warning>.admonition-title,.md-typeset .admonition.warning>summary,.md-typeset details.attention>.admonition-title,.md-typeset details.attention>summary,.md-typeset details.caution>.admonition-title,.md-typeset details.caution>summary,.md-typeset details.warning>.admonition-title,.md-typeset details.warning>summary{border-bottom-color:.1rem solid rgba(255,145,0,.1);background-color:rgba(255,145,0,.1)}.md-typeset .admonition.attention>.admonition-title:before,.md-typeset .admonition.attention>summary:before,.md-typeset .admonition.caution>.admonition-title:before,.md-typeset .admonition.caution>summary:before,.md-typeset .admonition.warning>.admonition-title:before,.md-typeset .admonition.warning>summary:before,.md-typeset details.attention>.admonition-title:before,.md-typeset details.attention>summary:before,.md-typeset details.caution>.admonition-title:before,.md-typeset details.caution>summary:before,.md-typeset details.warning>.admonition-title:before,.md-typeset details.warning>summary:before{color:#ff9100;content:"\E002"}.md-typeset .admonition.fail,.md-typeset .admonition.failure,.md-typeset .admonition.missing,.md-typeset details.fail,.md-typeset details.failure,.md-typeset details.missing{border-left-color:#ff5252}[dir=rtl] .md-typeset .admonition.fail,[dir=rtl] .md-typeset .admonition.failure,[dir=rtl] .md-typeset .admonition.missing,[dir=rtl] .md-typeset details.fail,[dir=rtl] .md-typeset details.failure,[dir=rtl] .md-typeset details.missing{border-right-color:#ff5252}.md-typeset .admonition.fail>.admonition-title,.md-typeset .admonition.fail>summary,.md-typeset .admonition.failure>.admonition-title,.md-typeset .admonition.failure>summary,.md-typeset .admonition.missing>.admonition-title,.md-typeset .admonition.missing>summary,.md-typeset details.fail>.admonition-title,.md-typeset details.fail>summary,.md-typeset details.failure>.admonition-title,.md-typeset details.failure>summary,.md-typeset details.missing>.admonition-title,.md-typeset 
details.missing>summary{border-bottom-color:.1rem solid rgba(255,82,82,.1);background-color:rgba(255,82,82,.1)}.md-typeset .admonition.fail>.admonition-title:before,.md-typeset .admonition.fail>summary:before,.md-typeset .admonition.failure>.admonition-title:before,.md-typeset .admonition.failure>summary:before,.md-typeset .admonition.missing>.admonition-title:before,.md-typeset .admonition.missing>summary:before,.md-typeset details.fail>.admonition-title:before,.md-typeset details.fail>summary:before,.md-typeset details.failure>.admonition-title:before,.md-typeset details.failure>summary:before,.md-typeset details.missing>.admonition-title:before,.md-typeset details.missing>summary:before{color:#ff5252;content:"\E14C"}.md-typeset .admonition.danger,.md-typeset .admonition.error,.md-typeset details.danger,.md-typeset details.error{border-left-color:#ff1744}[dir=rtl] .md-typeset .admonition.danger,[dir=rtl] .md-typeset .admonition.error,[dir=rtl] .md-typeset details.danger,[dir=rtl] .md-typeset details.error{border-right-color:#ff1744}.md-typeset .admonition.danger>.admonition-title,.md-typeset .admonition.danger>summary,.md-typeset .admonition.error>.admonition-title,.md-typeset .admonition.error>summary,.md-typeset details.danger>.admonition-title,.md-typeset details.danger>summary,.md-typeset details.error>.admonition-title,.md-typeset details.error>summary{border-bottom-color:.1rem solid rgba(255,23,68,.1);background-color:rgba(255,23,68,.1)}.md-typeset .admonition.danger>.admonition-title:before,.md-typeset .admonition.danger>summary:before,.md-typeset .admonition.error>.admonition-title:before,.md-typeset .admonition.error>summary:before,.md-typeset details.danger>.admonition-title:before,.md-typeset details.danger>summary:before,.md-typeset details.error>.admonition-title:before,.md-typeset details.error>summary:before{color:#ff1744;content:"\E3E7"}.md-typeset .admonition.bug,.md-typeset details.bug{border-left-color:#f50057}[dir=rtl] .md-typeset .admonition.bug,[dir=rtl] .md-typeset details.bug{border-right-color:#f50057}.md-typeset .admonition.bug>.admonition-title,.md-typeset .admonition.bug>summary,.md-typeset details.bug>.admonition-title,.md-typeset details.bug>summary{border-bottom-color:.1rem solid rgba(245,0,87,.1);background-color:rgba(245,0,87,.1)}.md-typeset .admonition.bug>.admonition-title:before,.md-typeset .admonition.bug>summary:before,.md-typeset details.bug>.admonition-title:before,.md-typeset details.bug>summary:before{color:#f50057;content:"\E868"}.md-typeset .admonition.example,.md-typeset details.example{border-left-color:#651fff}[dir=rtl] .md-typeset .admonition.example,[dir=rtl] .md-typeset details.example{border-right-color:#651fff}.md-typeset .admonition.example>.admonition-title,.md-typeset .admonition.example>summary,.md-typeset details.example>.admonition-title,.md-typeset details.example>summary{border-bottom-color:.1rem solid rgba(101,31,255,.1);background-color:rgba(101,31,255,.1)}.md-typeset .admonition.example>.admonition-title:before,.md-typeset .admonition.example>summary:before,.md-typeset details.example>.admonition-title:before,.md-typeset details.example>summary:before{color:#651fff;content:"\E242"}.md-typeset .admonition.cite,.md-typeset .admonition.quote,.md-typeset details.cite,.md-typeset details.quote{border-left-color:#9e9e9e}[dir=rtl] .md-typeset .admonition.cite,[dir=rtl] .md-typeset .admonition.quote,[dir=rtl] .md-typeset details.cite,[dir=rtl] .md-typeset details.quote{border-right-color:#9e9e9e}.md-typeset 
.admonition.cite>.admonition-title,.md-typeset .admonition.cite>summary,.md-typeset .admonition.quote>.admonition-title,.md-typeset .admonition.quote>summary,.md-typeset details.cite>.admonition-title,.md-typeset details.cite>summary,.md-typeset details.quote>.admonition-title,.md-typeset details.quote>summary{border-bottom-color:.1rem solid hsla(0,0%,62%,.1);background-color:hsla(0,0%,62%,.1)}.md-typeset .admonition.cite>.admonition-title:before,.md-typeset .admonition.cite>summary:before,.md-typeset .admonition.quote>.admonition-title:before,.md-typeset .admonition.quote>summary:before,.md-typeset details.cite>.admonition-title:before,.md-typeset details.cite>summary:before,.md-typeset details.quote>.admonition-title:before,.md-typeset details.quote>summary:before{color:#9e9e9e;content:"\E244"}.codehilite .o,.codehilite .ow,.md-typeset .highlight .o,.md-typeset .highlight .ow{color:inherit}.codehilite .ge,.md-typeset .highlight .ge{color:#000}.codehilite .gr,.md-typeset .highlight .gr{color:#a00}.codehilite .gh,.md-typeset .highlight .gh{color:#999}.codehilite .go,.md-typeset .highlight .go{color:#888}.codehilite .gp,.md-typeset .highlight .gp{color:#555}.codehilite .gs,.md-typeset .highlight .gs{color:inherit}.codehilite .gu,.md-typeset .highlight .gu{color:#aaa}.codehilite .gt,.md-typeset .highlight .gt{color:#a00}.codehilite .gd,.md-typeset .highlight .gd{background-color:#fdd}.codehilite .gi,.md-typeset .highlight .gi{background-color:#dfd}.codehilite .k,.md-typeset .highlight .k{color:#3b78e7}.codehilite .kc,.md-typeset .highlight .kc{color:#a71d5d}.codehilite .kd,.codehilite .kn,.md-typeset .highlight .kd,.md-typeset .highlight .kn{color:#3b78e7}.codehilite .kp,.md-typeset .highlight .kp{color:#a71d5d}.codehilite .kr,.codehilite .kt,.md-typeset .highlight .kr,.md-typeset .highlight .kt{color:#3e61a2}.codehilite .c,.codehilite .cm,.md-typeset .highlight .c,.md-typeset .highlight .cm{color:#999}.codehilite .cp,.md-typeset .highlight .cp{color:#666}.codehilite .c1,.codehilite .ch,.codehilite .cs,.md-typeset .highlight .c1,.md-typeset .highlight .ch,.md-typeset .highlight .cs{color:#999}.codehilite .na,.codehilite .nb,.md-typeset .highlight .na,.md-typeset .highlight .nb{color:#c2185b}.codehilite .bp,.md-typeset .highlight .bp{color:#3e61a2}.codehilite .nc,.md-typeset .highlight .nc{color:#c2185b}.codehilite .no,.md-typeset .highlight .no{color:#3e61a2}.codehilite .nd,.codehilite .ni,.md-typeset .highlight .nd,.md-typeset .highlight .ni{color:#666}.codehilite .ne,.codehilite .nf,.md-typeset .highlight .ne,.md-typeset .highlight .nf{color:#c2185b}.codehilite .nl,.md-typeset .highlight .nl{color:#3b5179}.codehilite .nn,.md-typeset .highlight .nn{color:#ec407a}.codehilite .nt,.md-typeset .highlight .nt{color:#3b78e7}.codehilite .nv,.codehilite .vc,.codehilite .vg,.codehilite .vi,.md-typeset .highlight .nv,.md-typeset .highlight .vc,.md-typeset .highlight .vg,.md-typeset .highlight .vi{color:#3e61a2}.codehilite .nx,.md-typeset .highlight .nx{color:#ec407a}.codehilite .il,.codehilite .m,.codehilite .mf,.codehilite .mh,.codehilite .mi,.codehilite .mo,.md-typeset .highlight .il,.md-typeset .highlight .m,.md-typeset .highlight .mf,.md-typeset .highlight .mh,.md-typeset .highlight .mi,.md-typeset .highlight .mo{color:#e74c3c}.codehilite .s,.codehilite .sb,.codehilite .sc,.md-typeset .highlight .s,.md-typeset .highlight .sb,.md-typeset .highlight .sc{color:#0d904f}.codehilite .sd,.md-typeset .highlight .sd{color:#999}.codehilite .s2,.md-typeset .highlight .s2{color:#0d904f}.codehilite 
.se,.codehilite .sh,.codehilite .si,.codehilite .sx,.md-typeset .highlight .se,.md-typeset .highlight .sh,.md-typeset .highlight .si,.md-typeset .highlight .sx{color:#183691}.codehilite .sr,.md-typeset .highlight .sr{color:#009926}.codehilite .s1,.codehilite .ss,.md-typeset .highlight .s1,.md-typeset .highlight .ss{color:#0d904f}.codehilite .err,.md-typeset .highlight .err{color:#a61717}.codehilite .w,.md-typeset .highlight .w{color:transparent}.codehilite .hll,.md-typeset .highlight .hll{display:block;margin:0 -1.2rem;padding:0 1.2rem;background-color:rgba(255,235,59,.5)}.md-typeset .codehilite,.md-typeset .highlight{position:relative;margin:1em 0;padding:0;border-radius:.2rem;background-color:hsla(0,0%,93%,.5);color:#37474f;line-height:1.4;-webkit-overflow-scrolling:touch}.md-typeset .codehilite code,.md-typeset .codehilite pre,.md-typeset .highlight code,.md-typeset .highlight pre{display:block;margin:0;padding:1.05rem 1.2rem;background-color:transparent;overflow:auto;vertical-align:top}.md-typeset .codehilite code::-webkit-scrollbar,.md-typeset .codehilite pre::-webkit-scrollbar,.md-typeset .highlight code::-webkit-scrollbar,.md-typeset .highlight pre::-webkit-scrollbar{width:.4rem;height:.4rem}.md-typeset .codehilite code::-webkit-scrollbar-thumb,.md-typeset .codehilite pre::-webkit-scrollbar-thumb,.md-typeset .highlight code::-webkit-scrollbar-thumb,.md-typeset .highlight pre::-webkit-scrollbar-thumb{background-color:rgba(0,0,0,.26)}.md-typeset .codehilite code::-webkit-scrollbar-thumb:hover,.md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover,.md-typeset .highlight code::-webkit-scrollbar-thumb:hover,.md-typeset .highlight pre::-webkit-scrollbar-thumb:hover{background-color:#536dfe}.md-typeset pre.codehilite,.md-typeset pre.highlight{overflow:visible}.md-typeset pre.codehilite code,.md-typeset pre.highlight code{display:block;padding:1.05rem 1.2rem;overflow:auto}.md-typeset .codehilitetable{display:block;margin:1em 0;border-radius:.2em;font-size:1.6rem;overflow:hidden}.md-typeset .codehilitetable tbody,.md-typeset .codehilitetable td{display:block;padding:0}.md-typeset .codehilitetable tr{display:flex}.md-typeset .codehilitetable .codehilite,.md-typeset .codehilitetable .highlight,.md-typeset .codehilitetable .linenodiv{margin:0;border-radius:0}.md-typeset .codehilitetable .linenodiv{padding:1.05rem 1.2rem}.md-typeset .codehilitetable .linenos{background-color:rgba(0,0,0,.07);color:rgba(0,0,0,.26);-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.md-typeset .codehilitetable .linenos pre{margin:0;padding:0;background-color:transparent;color:inherit;text-align:right}.md-typeset .codehilitetable .code{flex:1;overflow:hidden}.md-typeset>.codehilitetable{box-shadow:none}.md-typeset [id^="fnref:"]{display:inline-block}.md-typeset [id^="fnref:"]:target{margin-top:-7.6rem;padding-top:7.6rem;pointer-events:none}.md-typeset [id^="fn:"]:before{display:none;height:0;content:""}.md-typeset [id^="fn:"]:target:before{display:block;margin-top:-7rem;padding-top:7rem;pointer-events:none}.md-typeset .footnote{color:rgba(0,0,0,.54);font-size:1.28rem}.md-typeset .footnote ol{margin-left:0}.md-typeset .footnote li{transition:color .25s}.md-typeset .footnote li:target{color:rgba(0,0,0,.87)}.md-typeset .footnote li :first-child{margin-top:0}.md-typeset .footnote li:hover .footnote-backref,.md-typeset .footnote li:target .footnote-backref{-webkit-transform:translateX(0);transform:translateX(0);opacity:1}.md-typeset .footnote li:hover 
.footnote-backref:hover,.md-typeset .footnote li:target .footnote-backref{color:#536dfe}.md-typeset .footnote-ref{display:inline-block;pointer-events:auto}.md-typeset .footnote-ref:before{display:inline;margin:0 .2em;border-left:.1rem solid rgba(0,0,0,.26);font-size:1.25em;content:"";vertical-align:-.5rem}.md-typeset .footnote-backref{display:inline-block;-webkit-transform:translateX(.5rem);transform:translateX(.5rem);transition:color .25s,opacity .125s .125s,-webkit-transform .25s .125s;transition:transform .25s .125s,color .25s,opacity .125s .125s;transition:transform .25s .125s,color .25s,opacity .125s .125s,-webkit-transform .25s .125s;color:rgba(0,0,0,.26);font-size:0;opacity:0;vertical-align:text-bottom}[dir=rtl] .md-typeset .footnote-backref{-webkit-transform:translateX(-.5rem);transform:translateX(-.5rem)}.md-typeset .footnote-backref:before{display:inline-block;font-size:1.6rem;content:"\E31B"}[dir=rtl] .md-typeset .footnote-backref:before{-webkit-transform:scaleX(-1);transform:scaleX(-1)}.md-typeset .headerlink{display:inline-block;margin-left:1rem;-webkit-transform:translateY(.5rem);transform:translateY(.5rem);transition:color .25s,opacity .125s .25s,-webkit-transform .25s .25s;transition:transform .25s .25s,color .25s,opacity .125s .25s;transition:transform .25s .25s,color .25s,opacity .125s .25s,-webkit-transform .25s .25s;opacity:0}[dir=rtl] .md-typeset .headerlink{margin-right:1rem;margin-left:0}html body .md-typeset .headerlink{color:rgba(0,0,0,.26)}.md-typeset h1[id]:before{display:block;margin-top:-.9rem;padding-top:.9rem;content:""}.md-typeset h1[id]:target:before{margin-top:-6.9rem;padding-top:6.9rem}.md-typeset h1[id] .headerlink:focus,.md-typeset h1[id]:hover .headerlink,.md-typeset h1[id]:target .headerlink{-webkit-transform:translate(0);transform:translate(0);opacity:1}.md-typeset h1[id] .headerlink:focus,.md-typeset h1[id]:hover .headerlink:hover,.md-typeset h1[id]:target .headerlink{color:#536dfe}.md-typeset h2[id]:before{display:block;margin-top:-.8rem;padding-top:.8rem;content:""}.md-typeset h2[id]:target:before{margin-top:-6.8rem;padding-top:6.8rem}.md-typeset h2[id] .headerlink:focus,.md-typeset h2[id]:hover .headerlink,.md-typeset h2[id]:target .headerlink{-webkit-transform:translate(0);transform:translate(0);opacity:1}.md-typeset h2[id] .headerlink:focus,.md-typeset h2[id]:hover .headerlink:hover,.md-typeset h2[id]:target .headerlink{color:#536dfe}.md-typeset h3[id]:before{display:block;margin-top:-.9rem;padding-top:.9rem;content:""}.md-typeset h3[id]:target:before{margin-top:-6.9rem;padding-top:6.9rem}.md-typeset h3[id] .headerlink:focus,.md-typeset h3[id]:hover .headerlink,.md-typeset h3[id]:target .headerlink{-webkit-transform:translate(0);transform:translate(0);opacity:1}.md-typeset h3[id] .headerlink:focus,.md-typeset h3[id]:hover .headerlink:hover,.md-typeset h3[id]:target .headerlink{color:#536dfe}.md-typeset h4[id]:before{display:block;margin-top:-.9rem;padding-top:.9rem;content:""}.md-typeset h4[id]:target:before{margin-top:-6.9rem;padding-top:6.9rem}.md-typeset h4[id] .headerlink:focus,.md-typeset h4[id]:hover .headerlink,.md-typeset h4[id]:target .headerlink{-webkit-transform:translate(0);transform:translate(0);opacity:1}.md-typeset h4[id] .headerlink:focus,.md-typeset h4[id]:hover .headerlink:hover,.md-typeset h4[id]:target .headerlink{color:#536dfe}.md-typeset h5[id]:before{display:block;margin-top:-1.1rem;padding-top:1.1rem;content:""}.md-typeset h5[id]:target:before{margin-top:-7.1rem;padding-top:7.1rem}.md-typeset h5[id] 
.headerlink:focus,.md-typeset h5[id]:hover .headerlink,.md-typeset h5[id]:target .headerlink{-webkit-transform:translate(0);transform:translate(0);opacity:1}.md-typeset h5[id] .headerlink:focus,.md-typeset h5[id]:hover .headerlink:hover,.md-typeset h5[id]:target .headerlink{color:#536dfe}.md-typeset h6[id]:before{display:block;margin-top:-1.1rem;padding-top:1.1rem;content:""}.md-typeset h6[id]:target:before{margin-top:-7.1rem;padding-top:7.1rem}.md-typeset h6[id] .headerlink:focus,.md-typeset h6[id]:hover .headerlink,.md-typeset h6[id]:target .headerlink{-webkit-transform:translate(0);transform:translate(0);opacity:1}.md-typeset h6[id] .headerlink:focus,.md-typeset h6[id]:hover .headerlink:hover,.md-typeset h6[id]:target .headerlink{color:#536dfe}.md-typeset .MJXc-display{margin:.75em 0;padding:.75em 0;overflow:auto;-webkit-overflow-scrolling:touch}.md-typeset .MathJax_CHTML{outline:0}.md-typeset .critic.comment,.md-typeset del.critic,.md-typeset ins.critic{margin:0 .25em;padding:.0625em 0;border-radius:.2rem;-webkit-box-decoration-break:clone;box-decoration-break:clone}.md-typeset del.critic{background-color:#fdd;box-shadow:.25em 0 0 #fdd,-.25em 0 0 #fdd}.md-typeset ins.critic{background-color:#dfd;box-shadow:.25em 0 0 #dfd,-.25em 0 0 #dfd}.md-typeset .critic.comment{background-color:hsla(0,0%,93%,.5);color:#37474f;box-shadow:.25em 0 0 hsla(0,0%,93%,.5),-.25em 0 0 hsla(0,0%,93%,.5)}.md-typeset .critic.comment:before{padding-right:.125em;color:rgba(0,0,0,.26);content:"\E0B7";vertical-align:-.125em}.md-typeset .critic.block{display:block;margin:1em 0;padding-right:1.6rem;padding-left:1.6rem;box-shadow:none}.md-typeset .critic.block :first-child{margin-top:.5em}.md-typeset .critic.block :last-child{margin-bottom:.5em}.md-typeset details{padding-top:0}.md-typeset details[open]>summary:after{-webkit-transform:rotate(180deg);transform:rotate(180deg)}.md-typeset details:not([open]){padding-bottom:0}.md-typeset details:not([open])>summary{border-bottom:none}.md-typeset details summary{padding-right:4rem}[dir=rtl] .md-typeset details summary{padding-left:4rem}.no-details .md-typeset details:not([open])>*{display:none}.no-details .md-typeset details:not([open]) summary{display:block}.md-typeset summary{display:block;outline:none;cursor:pointer}.md-typeset summary::-webkit-details-marker{display:none}.md-typeset summary:after{position:absolute;top:.8rem;right:1.2rem;color:rgba(0,0,0,.26);font-size:2rem;content:"\E313"}[dir=rtl] .md-typeset summary:after{right:auto;left:1.2rem}.md-typeset .emojione{width:2rem;vertical-align:text-top}.md-typeset code.codehilite,.md-typeset code.highlight{margin:0 .29412em;padding:.07353em 0}.md-typeset .task-list-item{position:relative;list-style-type:none}.md-typeset .task-list-item [type=checkbox]{position:absolute;top:.45em;left:-2em}[dir=rtl] .md-typeset .task-list-item [type=checkbox]{right:-2em;left:auto}.md-typeset .task-list-control .task-list-indicator:before{position:absolute;top:.15em;left:-1.25em;color:rgba(0,0,0,.26);font-size:1.25em;content:"\E835";vertical-align:-.25em}[dir=rtl] .md-typeset .task-list-control .task-list-indicator:before{right:-1.25em;left:auto}.md-typeset .task-list-control [type=checkbox]:checked+.task-list-indicator:before{content:"\E834"}.md-typeset .task-list-control [type=checkbox]{opacity:0;z-index:-1}@media print{.md-typeset a:after{color:rgba(0,0,0,.54);content:" [" attr(href) "]"}.md-typeset code,.md-typeset pre{white-space:pre-wrap}.md-typeset 
code{box-shadow:none;-webkit-box-decoration-break:initial;box-decoration-break:slice}.md-clipboard,.md-content__icon,.md-footer,.md-header,.md-sidebar,.md-tabs,.md-typeset .headerlink{display:none}}@media only screen and (max-width:44.9375em){.md-typeset pre{margin:1em -1.6rem;border-radius:0}.md-typeset pre>code{padding:1.05rem 1.6rem}.md-footer-nav__link--prev .md-footer-nav__title{display:none}.md-search-result__teaser{max-height:5rem;-webkit-line-clamp:3}.codehilite .hll,.md-typeset .highlight .hll{margin:0 -1.6rem;padding:0 1.6rem}.md-typeset>.codehilite,.md-typeset>.highlight{margin:1em -1.6rem;border-radius:0}.md-typeset>.codehilite code,.md-typeset>.codehilite pre,.md-typeset>.highlight code,.md-typeset>.highlight pre{padding:1.05rem 1.6rem}.md-typeset>.codehilitetable{margin:1em -1.6rem;border-radius:0}.md-typeset>.codehilitetable .codehilite>code,.md-typeset>.codehilitetable .codehilite>pre,.md-typeset>.codehilitetable .highlight>code,.md-typeset>.codehilitetable .highlight>pre,.md-typeset>.codehilitetable .linenodiv{padding:1rem 1.6rem}.md-typeset>p>.MJXc-display{margin:.75em -1.6rem;padding:.25em 1.6rem}}@media only screen and (min-width:100em){html{font-size:68.75%}}@media only screen and (min-width:125em){html{font-size:75%}}@media only screen and (max-width:59.9375em){body[data-md-state=lock]{overflow:hidden}.ios body[data-md-state=lock] .md-container{display:none}html .md-nav__link[for=toc]{display:block;padding-right:4.8rem}html .md-nav__link[for=toc]:after{color:inherit;content:"\E8DE"}html .md-nav__link[for=toc]+.md-nav__link{display:none}html .md-nav__link[for=toc]~.md-nav{display:flex}html [dir=rtl] .md-nav__link{padding-right:1.6rem;padding-left:4.8rem}.md-nav__source{display:block;padding:0 .4rem;background-color:rgba(50,64,144,.9675);color:#fff}.md-search__overlay{position:absolute;top:.4rem;left:.4rem;width:3.6rem;height:3.6rem;-webkit-transform-origin:center;transform-origin:center;transition:opacity .2s .2s,-webkit-transform .3s .1s;transition:transform .3s .1s,opacity .2s .2s;transition:transform .3s .1s,opacity .2s .2s,-webkit-transform .3s .1s;border-radius:2rem;background-color:#fff;overflow:hidden;pointer-events:none}[dir=rtl] .md-search__overlay{right:.4rem;left:auto}[data-md-toggle=search]:checked~.md-header .md-search__overlay{transition:opacity .1s,-webkit-transform .4s;transition:transform .4s,opacity .1s;transition:transform .4s,opacity .1s,-webkit-transform .4s;opacity:1}.md-search__inner{position:fixed;top:0;left:100%;width:100%;height:100%;-webkit-transform:translateX(5%);transform:translateX(5%);transition:right 0s .3s,left 0s .3s,opacity .15s .15s,-webkit-transform .15s cubic-bezier(.4,0,.2,1) .15s;transition:right 0s .3s,left 0s .3s,transform .15s cubic-bezier(.4,0,.2,1) .15s,opacity .15s .15s;transition:right 0s .3s,left 0s .3s,transform .15s cubic-bezier(.4,0,.2,1) .15s,opacity .15s .15s,-webkit-transform .15s cubic-bezier(.4,0,.2,1) .15s;opacity:0;z-index:2}[data-md-toggle=search]:checked~.md-header .md-search__inner{left:0;-webkit-transform:translateX(0);transform:translateX(0);transition:right 0s 0s,left 0s 0s,opacity .15s .15s,-webkit-transform .15s cubic-bezier(.1,.7,.1,1) .15s;transition:right 0s 0s,left 0s 0s,transform .15s cubic-bezier(.1,.7,.1,1) .15s,opacity .15s .15s;transition:right 0s 0s,left 0s 0s,transform .15s cubic-bezier(.1,.7,.1,1) .15s,opacity .15s .15s,-webkit-transform .15s cubic-bezier(.1,.7,.1,1) .15s;opacity:1}[dir=rtl] [data-md-toggle=search]:checked~.md-header .md-search__inner{right:0;left:auto}html [dir=rtl] 
.md-search__inner{right:100%;left:auto;-webkit-transform:translateX(-5%);transform:translateX(-5%)}.md-search__input{width:100%;height:4.8rem;font-size:1.8rem}.md-search__icon[for=search]{top:1.2rem;left:1.6rem}.md-search__icon[for=search][for=search]:before{content:"\E5C4"}[dir=rtl] .md-search__icon[for=search][for=search]:before{content:"\E5C8"}.md-search__icon[type=reset]{top:1.2rem;right:1.6rem}.md-search__output{top:4.8rem;bottom:0}.md-search-result__article--document:before{display:none}}@media only screen and (max-width:76.1875em){[data-md-toggle=drawer]:checked~.md-overlay{width:100%;height:100%;transition:width 0s,height 0s,opacity .25s;opacity:1}.md-header-nav__button.md-icon--home,.md-header-nav__button.md-logo{display:none}.md-hero__inner{margin-top:4.8rem;margin-bottom:2.4rem}.md-nav{background-color:#fff}.md-nav--primary,.md-nav--primary .md-nav{display:flex;position:absolute;top:0;right:0;left:0;flex-direction:column;height:100%;z-index:1}.md-nav--primary .md-nav__item,.md-nav--primary .md-nav__title{font-size:1.6rem;line-height:1.5}html .md-nav--primary .md-nav__title{position:relative;height:11.2rem;padding:6rem 1.6rem .4rem;background-color:rgba(0,0,0,.07);color:rgba(0,0,0,.54);font-weight:400;line-height:4.8rem;white-space:nowrap;cursor:pointer}html .md-nav--primary .md-nav__title:before{display:block;position:absolute;top:.4rem;left:.4rem;width:4rem;height:4rem;color:rgba(0,0,0,.54)}html .md-nav--primary .md-nav__title~.md-nav__list{background-color:#fff;box-shadow:inset 0 .1rem 0 rgba(0,0,0,.07)}html .md-nav--primary .md-nav__title~.md-nav__list>.md-nav__item:first-child{border-top:0}html .md-nav--primary .md-nav__title--site{position:relative;background-color:#3f51b5;color:#fff}html .md-nav--primary .md-nav__title--site .md-nav__button{display:block;position:absolute;top:.4rem;left:.4rem;width:6.4rem;height:6.4rem;font-size:4.8rem}html .md-nav--primary .md-nav__title--site:before{display:none}html [dir=rtl] .md-nav--primary .md-nav__title--site .md-nav__button,html [dir=rtl] .md-nav--primary .md-nav__title:before{right:.4rem;left:auto}.md-nav--primary .md-nav__list{flex:1;overflow-y:auto}.md-nav--primary .md-nav__item{padding:0;border-top:.1rem solid rgba(0,0,0,.07)}[dir=rtl] .md-nav--primary .md-nav__item{padding:0}.md-nav--primary .md-nav__item--nested>.md-nav__link{padding-right:4.8rem}[dir=rtl] .md-nav--primary .md-nav__item--nested>.md-nav__link{padding-right:1.6rem;padding-left:4.8rem}.md-nav--primary .md-nav__item--nested>.md-nav__link:after{content:"\E315"}[dir=rtl] .md-nav--primary .md-nav__item--nested>.md-nav__link:after{content:"\E314"}.md-nav--primary .md-nav__link{position:relative;margin-top:0;padding:1.2rem 1.6rem}.md-nav--primary .md-nav__link:after{position:absolute;top:50%;right:1.2rem;margin-top:-1.2rem;color:inherit;font-size:2.4rem}[dir=rtl] .md-nav--primary .md-nav__link:after{right:auto;left:1.2rem}.md-nav--primary .md-nav--secondary .md-nav__link{position:static}.md-nav--primary .md-nav--secondary .md-nav{position:static;background-color:transparent}.md-nav--primary .md-nav--secondary .md-nav .md-nav__link{padding-left:2.8rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav__link{padding-right:2.8rem;padding-left:0}.md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav__link{padding-left:4rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav__link{padding-right:4rem;padding-left:0}.md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav__link{padding-left:5.2rem}[dir=rtl] .md-nav--primary 
.md-nav--secondary .md-nav .md-nav .md-nav .md-nav__link{padding-right:5.2rem;padding-left:0}.md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav .md-nav__link{padding-left:6.4rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav .md-nav__link{padding-right:6.4rem;padding-left:0}.md-nav__toggle~.md-nav{display:flex;-webkit-transform:translateX(100%);transform:translateX(100%);transition:opacity .125s .05s,-webkit-transform .25s cubic-bezier(.8,0,.6,1);transition:transform .25s cubic-bezier(.8,0,.6,1),opacity .125s .05s;transition:transform .25s cubic-bezier(.8,0,.6,1),opacity .125s .05s,-webkit-transform .25s cubic-bezier(.8,0,.6,1);opacity:0}[dir=rtl] .md-nav__toggle~.md-nav{-webkit-transform:translateX(-100%);transform:translateX(-100%)}.no-csstransforms3d .md-nav__toggle~.md-nav{display:none}.md-nav__toggle:checked~.md-nav{-webkit-transform:translateX(0);transform:translateX(0);transition:opacity .125s .125s,-webkit-transform .25s cubic-bezier(.4,0,.2,1);transition:transform .25s cubic-bezier(.4,0,.2,1),opacity .125s .125s;transition:transform .25s cubic-bezier(.4,0,.2,1),opacity .125s .125s,-webkit-transform .25s cubic-bezier(.4,0,.2,1);opacity:1}.no-csstransforms3d .md-nav__toggle:checked~.md-nav{display:flex}.md-sidebar--primary{position:fixed;top:0;left:-24.2rem;width:24.2rem;height:100%;-webkit-transform:translateX(0);transform:translateX(0);transition:box-shadow .25s,-webkit-transform .25s cubic-bezier(.4,0,.2,1);transition:transform .25s cubic-bezier(.4,0,.2,1),box-shadow .25s;transition:transform .25s cubic-bezier(.4,0,.2,1),box-shadow .25s,-webkit-transform .25s cubic-bezier(.4,0,.2,1);background-color:#fff;z-index:3}[dir=rtl] .md-sidebar--primary{right:-24.2rem;left:auto}.no-csstransforms3d .md-sidebar--primary{display:none}[data-md-toggle=drawer]:checked~.md-container .md-sidebar--primary{box-shadow:0 8px 10px 1px rgba(0,0,0,.14),0 3px 14px 2px rgba(0,0,0,.12),0 5px 5px -3px rgba(0,0,0,.4);-webkit-transform:translateX(24.2rem);transform:translateX(24.2rem)}[dir=rtl] [data-md-toggle=drawer]:checked~.md-container .md-sidebar--primary{-webkit-transform:translateX(-24.2rem);transform:translateX(-24.2rem)}.no-csstransforms3d [data-md-toggle=drawer]:checked~.md-container .md-sidebar--primary{display:block}.md-sidebar--primary .md-sidebar__scrollwrap{overflow:hidden;position:absolute;top:0;right:0;bottom:0;left:0;margin:0}.md-tabs{display:none}}@media only screen and (min-width:60em){.md-content{margin-right:24.2rem}[dir=rtl] .md-content{margin-right:0;margin-left:24.2rem}.md-header-nav__button.md-icon--search{display:none}.md-header-nav__source{display:block;width:23rem;max-width:23rem;margin-left:2.8rem;padding-right:1.2rem}[dir=rtl] .md-header-nav__source{margin-right:2.8rem;margin-left:0;padding-right:0;padding-left:1.2rem}.md-search{padding:.4rem}.md-search__overlay{position:fixed;top:0;left:0;width:0;height:0;transition:width 0s .25s,height 0s .25s,opacity .25s;background-color:rgba(0,0,0,.54);cursor:pointer}[dir=rtl] .md-search__overlay{right:0;left:auto}[data-md-toggle=search]:checked~.md-header .md-search__overlay{width:100%;height:100%;transition:width 0s,height 0s,opacity .25s;opacity:1}.md-search__inner{position:relative;width:23rem;padding:.2rem 0;float:right;transition:width .25s cubic-bezier(.1,.7,.1,1)}[dir=rtl] .md-search__inner{float:left}.md-search__form,.md-search__input{border-radius:.2rem}.md-search__input{width:100%;height:3.6rem;padding-left:4.4rem;transition:background-color .25s cubic-bezier(.1,.7,.1,1),color .25s 
cubic-bezier(.1,.7,.1,1);background-color:rgba(0,0,0,.26);color:inherit;font-size:1.6rem}[dir=rtl] .md-search__input{padding-right:4.4rem}.md-search__input+.md-search__icon{color:inherit}.md-search__input::-webkit-input-placeholder{color:hsla(0,0%,100%,.7)}.md-search__input:-ms-input-placeholder,.md-search__input::-ms-input-placeholder{color:hsla(0,0%,100%,.7)}.md-search__input::placeholder{color:hsla(0,0%,100%,.7)}.md-search__input:hover{background-color:hsla(0,0%,100%,.12)}[data-md-toggle=search]:checked~.md-header .md-search__input{border-radius:.2rem .2rem 0 0;background-color:#fff;color:rgba(0,0,0,.87);text-overflow:none}[data-md-toggle=search]:checked~.md-header .md-search__input+.md-search__icon,[data-md-toggle=search]:checked~.md-header .md-search__input::-webkit-input-placeholder{color:rgba(0,0,0,.54)}[data-md-toggle=search]:checked~.md-header .md-search__input+.md-search__icon,[data-md-toggle=search]:checked~.md-header .md-search__input:-ms-input-placeholder,[data-md-toggle=search]:checked~.md-header .md-search__input::-ms-input-placeholder{color:rgba(0,0,0,.54)}[data-md-toggle=search]:checked~.md-header .md-search__input+.md-search__icon,[data-md-toggle=search]:checked~.md-header .md-search__input::placeholder{color:rgba(0,0,0,.54)}.md-search__output{top:3.8rem;transition:opacity .4s;opacity:0}[data-md-toggle=search]:checked~.md-header .md-search__output{box-shadow:0 6px 10px 0 rgba(0,0,0,.14),0 1px 18px 0 rgba(0,0,0,.12),0 3px 5px -1px rgba(0,0,0,.4);opacity:1}.md-search__scrollwrap{max-height:0}[data-md-toggle=search]:checked~.md-header .md-search__scrollwrap{max-height:75vh}.md-search__scrollwrap::-webkit-scrollbar{width:.4rem;height:.4rem}.md-search__scrollwrap::-webkit-scrollbar-thumb{background-color:rgba(0,0,0,.26)}.md-search__scrollwrap::-webkit-scrollbar-thumb:hover{background-color:#536dfe}.md-search-result__meta{padding-left:4.4rem}[dir=rtl] .md-search-result__meta{padding-right:4.4rem;padding-left:0}.md-search-result__article{padding-left:4.4rem}[dir=rtl] .md-search-result__article{padding-right:4.4rem;padding-left:1.6rem}.md-sidebar--secondary{display:block;margin-left:100%;-webkit-transform:translate(-100%);transform:translate(-100%)}[dir=rtl] .md-sidebar--secondary{margin-right:100%;margin-left:0;-webkit-transform:translate(100%);transform:translate(100%)}}@media only screen and (min-width:76.25em){.md-content{margin-left:24.2rem}[dir=rtl] .md-content{margin-right:24.2rem}.md-content__inner{margin-right:2.4rem;margin-left:2.4rem}.md-header-nav__button.md-icon--menu{display:none}.md-nav[data-md-state=animate]{transition:max-height .25s cubic-bezier(.86,0,.07,1)}.md-nav__toggle~.md-nav{max-height:0;overflow:hidden}.no-js .md-nav__toggle~.md-nav{display:none}.md-nav[data-md-state=expand],.md-nav__toggle:checked~.md-nav{max-height:100%}.no-js .md-nav[data-md-state=expand],.no-js .md-nav__toggle:checked~.md-nav{display:block}.md-nav__item--nested>.md-nav>.md-nav__title{display:none}.md-nav__item--nested>.md-nav__link:after{display:inline-block;-webkit-transform-origin:.45em .45em;transform-origin:.45em .45em;-webkit-transform-style:preserve-3d;transform-style:preserve-3d;vertical-align:-.125em}.js .md-nav__item--nested>.md-nav__link:after{transition:-webkit-transform .4s;transition:transform .4s;transition:transform .4s,-webkit-transform .4s}.md-nav__item--nested .md-nav__toggle:checked~.md-nav__link:after{-webkit-transform:rotateX(180deg);transform:rotateX(180deg)}.md-search__scrollwrap,[data-md-toggle=search]:checked~.md-header 
.md-search__inner{width:68.8rem}.md-sidebar--secondary{margin-left:122rem}[dir=rtl] .md-sidebar--secondary{margin-right:122rem;margin-left:0}.md-tabs~.md-main .md-nav--primary>.md-nav__list>.md-nav__item--nested{font-size:0;visibility:hidden}.md-tabs--active~.md-main .md-nav--primary .md-nav__title{display:block;padding:0}.md-tabs--active~.md-main .md-nav--primary .md-nav__title--site{display:none}.no-js .md-tabs--active~.md-main .md-nav--primary .md-nav{display:block}.md-tabs--active~.md-main .md-nav--primary>.md-nav__list>.md-nav__item{font-size:0;visibility:hidden}.md-tabs--active~.md-main .md-nav--primary>.md-nav__list>.md-nav__item--nested{display:none;font-size:1.4rem;overflow:auto;visibility:visible}.md-tabs--active~.md-main .md-nav--primary>.md-nav__list>.md-nav__item--nested>.md-nav__link{display:none}.md-tabs--active~.md-main .md-nav--primary>.md-nav__list>.md-nav__item--active{display:block}.md-tabs--active~.md-main .md-nav[data-md-level="1"]{max-height:none;overflow:visible}.md-tabs--active~.md-main .md-nav[data-md-level="1"]>.md-nav__list>.md-nav__item{padding-left:0}.md-tabs--active~.md-main .md-nav[data-md-level="1"] .md-nav .md-nav__title{display:none}}@media only screen and (min-width:45em){.md-footer-nav__link{width:50%}.md-footer-copyright{max-width:75%;float:left}[dir=rtl] .md-footer-copyright{float:right}.md-footer-social{padding:1.2rem 0;float:right}[dir=rtl] .md-footer-social{float:left}}@media only screen and (max-width:29.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{-webkit-transform:scale(45);transform:scale(45)}}@media only screen and (min-width:30em) and (max-width:44.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{-webkit-transform:scale(60);transform:scale(60)}}@media only screen and (min-width:45em) and (max-width:59.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{-webkit-transform:scale(75);transform:scale(75)}}@media only screen and (min-width:60em) and (max-width:76.1875em){.md-search__scrollwrap,[data-md-toggle=search]:checked~.md-header .md-search__inner{width:46.8rem}.md-search-result__teaser{max-height:5rem;-webkit-line-clamp:3}} +/*# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IiIsImZpbGUiOiJhc3NldHMvc3R5bGVzaGVldHMvYXBwbGljYXRpb24uNzhhYWIyZGMuY3NzIiwic291cmNlUm9vdCI6IiJ9*/ \ No newline at end of file diff --git a/docs/cli.md b/docs/cli.md deleted file mode 100644 index b4a4c579e..000000000 --- a/docs/cli.md +++ /dev/null @@ -1,1483 +0,0 @@ -# LBRY Command Line Documentation - -## blob_announce - -```text -Announce blobs to the DHT - -Usage: - blob_announce [--announce_all] [ | --blob_hash=] - [ | --stream_hash=] - [ | --sd_hash=] - - -Options: - --announce_all= : (bool) announce all the blobs possessed by user - --blob_hash= : (str) announce a blob, specified by blob_hash - --stream_hash= : (str) announce all blobs associated with - stream_hash - --sd_hash= : (str) announce all blobs associated with - sd_hash and the sd_hash itself - -Returns: - (bool) true if successful -``` - -## blob_availability - -```text -Get blob availability - -Usage: - blob_availability () [ | --search_timeout=] - [ | --blob_timeout=] - - -Options: - --blob_hash= : (str) check availability for this blob hash - --search_timeout= : (int) how long to search for peers for the blob - in the dht - --blob_timeout= : (int) how long to try downloading from a peer - -Returns: - (dict) { - "is_available": - "reachable_peers": [":"], - 
"unreachable_peers": [":"] - } -``` - -## blob_delete - -```text -Delete a blob - -Usage: - blob_delete ( | --blob_hash= : (str) blob hash of the blob to delete - -Returns: - (str) Success/fail message -``` - -## blob_get - -```text -Download and return a blob - -Usage: - blob_get ( | --blob_hash=) [--timeout=] - [--encoding=] [--payment_rate_manager=] - - -Options: - --blob_hash= : (str) blob hash of the blob to get - --timeout= : (int) timeout in number of seconds - --encoding= : (str) by default no attempt at decoding - is made, can be set to one of the - following decoders: - 'json' - --payment_rate_manager= : (str) if not given the default payment rate - manager will be used. - supported alternative rate managers: - 'only-free' - -Returns: - (str) Success/Fail message or (dict) decoded data -``` - -## blob_list - -```text -Returns blob hashes. If not given filters, returns all blobs known by the blob manager - -Usage: - blob_list [--needed] [--finished] [ | --uri=] - [ | --stream_hash=] - [ | --sd_hash=] - [ | --page_size=] - [ | --page=] - - -Options: - --needed : (bool) only return needed blobs - --finished : (bool) only return finished blobs - --uri= : (str) filter blobs by stream in a uri - --stream_hash= : (str) filter blobs by stream hash - --sd_hash= : (str) filter blobs by sd hash - --page_size= : (int) results page size - --page= : (int) page of results to return - -Returns: - (list) List of blob hashes -``` - -## blob_reflect_all - -```text -Reflects all saved blobs - -Usage: - blob_reflect_all - - -Options: - None - -Returns: - (bool) true if successful -``` - -## block_show - -```text -Get contents of a block - -Usage: - block_show ( | --blockhash=) | ( | --height=) - - -Options: - --blockhash= : (str) hash of the block to look up - --height= : (int) height of the block to look up - -Returns: - (dict) Requested block -``` - -## channel_export - -```text -Export serialized channel signing information for a given certificate claim id - -Usage: - channel_export ( | --claim_id=) - - -Options: - --claim_id= : (str) Claim ID to export information about - -Returns: - (str) Serialized certificate information -``` - -## channel_import - -```text -Import serialized channel signing information (to allow signing new claims to the channel) - -Usage: - channel_import ( | - --serialized_certificate_info=) - - -Options: - --serialized_certificate_info= : (str) certificate info - -Returns: - (dict) Result dictionary -``` - -## channel_list - -```text -Get certificate claim infos for channels that can be published to - -Usage: - channel_list - - -Options: - None - -Returns: - (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim - is in the wallet. 
-``` - -## channel_new - -```text -Generate a publisher key and create a new '@' prefixed certificate claim - -Usage: - channel_new ( | --channel_name=) - ( | --amount=) - - -Options: - --channel_name= : (str) name of the channel prefixed with '@' - --amount= : (float) bid amount on the channel - -Returns: - (dict) Dictionary containing result of the claim - { - 'tx' : (str) hex encoded transaction - 'txid' : (str) txid of resulting claim - 'nout' : (int) nout of the resulting claim - 'fee' : (float) fee paid for the claim transaction - 'claim_id' : (str) claim ID of the resulting claim - } -``` - -## claim_abandon - -```text -Abandon a name and reclaim credits from the claim - -Usage: - claim_abandon [ | --claim_id=] - [ | --txid=] [ | --nout=] - - -Options: - --claim_id= : (str) claim_id of the claim to abandon - --txid= : (str) txid of the claim to abandon - --nout= : (int) nout of the claim to abandon - -Returns: - (dict) Dictionary containing result of the claim - { - txid : (str) txid of resulting transaction - fee : (float) fee paid for the transaction - } -``` - -## claim_list - -```text -List current claims and information about them for a given name - -Usage: - claim_list ( | --name=) - - -Options: - --name= : (str) name of the claim to list info about - -Returns: - (dict) State of claims assigned for the name - { - 'claims': (list) list of claims for the name - [ - { - 'amount': (float) amount assigned to the claim - 'effective_amount': (float) total amount assigned to the claim, - including supports - 'claim_id': (str) claim ID of the claim - 'height': (int) height of block containing the claim - 'txid': (str) txid of the claim - 'nout': (int) nout of the claim - 'permanent_url': (str) permanent url of the claim, - 'supports': (list) a list of supports attached to the claim - 'value': (str) the value of the claim - }, - ] - 'supports_without_claims': (list) supports without any claims attached to them - 'last_takeover_height': (int) the height of last takeover for the name - } -``` - -## claim_list_by_channel - -```text -Get paginated claims in a channel specified by a channel uri - -Usage: - claim_list_by_channel ( | --uri=) [...] 
[--page=] - [--page_size=] - - -Options: - --uri= : (str) uri of the channel - --uris= : (list) uris of the channel - --page= : (int) which page of results to return where page 1 is the first - page, defaults to no pages - --page_size= : (int) number of results in a page, default of 10 - -Returns: - { - resolved channel uri: { - If there was an error: - 'error': (str) error message - - 'claims_in_channel': the total number of results for the channel, - - If a page of results was requested: - 'returned_page': page number returned, - 'claims_in_channel': [ - { - 'absolute_channel_position': (int) claim index number in sorted list of - claims which assert to be part of the - channel - 'address': (str) claim address, - 'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number, - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'supports: (list) list of supports [{'txid': (str) txid, - 'nout': (int) nout, - 'amount': (float) amount}], - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - ], - } - } -``` - -## claim_list_mine - -```text -List my name claims - -Usage: - claim_list_mine - - -Options: - None - -Returns: - (list) List of name claims owned by user - [ - { - 'address': (str) address that owns the claim - 'amount': (float) amount assigned to the claim - 'blocks_to_expiration': (int) number of blocks until it expires - 'category': (str) "claim", "update" , or "support" - 'claim_id': (str) claim ID of the claim - 'confirmations': (int) number of blocks of confirmations for the claim - 'expiration_height': (int) the block height which the claim will expire - 'expired': (bool) true if expired, false otherwise - 'height': (int) height of the block containing the claim - 'is_spent': (bool) true if claim is abandoned, false otherwise - 'name': (str) name of the claim - 'permanent_url': (str) permanent url of the claim, - 'txid': (str) txid of the cliam - 'nout': (int) nout of the claim - 'value': (str) value of the claim - }, - ] -``` - -## claim_new_support - -```text -Support a name claim - -Usage: - claim_new_support ( | --name=) ( | --claim_id=) - ( | --amount=) - - -Options: - --name= : (str) name of the claim to support - --claim_id= : (str) claim_id of the claim to support - --amount= : (float) amount of support - -Returns: - (dict) Dictionary containing result of the claim - { - txid : (str) txid of resulting support claim - nout : (int) nout of the resulting support claim - fee : (float) fee paid for the transaction - } -``` - -## claim_renew - -```text -Renew claim(s) or support(s) - -Usage: - claim_renew ( | --outpoint=) | ( | --height=) - - -Options: - --outpoint= : (str) outpoint of the claim to renew - --height= : (str) update claims expiring before or at this block height - -Returns: - (dict) Dictionary where key is the the original claim's outpoint and - value is the result of the renewal - { - outpoint:{ - - 'tx' : (str) hex encoded transaction - 'txid' : (str) txid of resulting claim - 'nout' : (int) nout of the resulting claim - 'fee' : (float) fee paid for the claim transaction - 'claim_id' : (str) claim ID of the resulting claim - }, - } -``` - -## claim_send_to_address - -```text 
-Send a name claim to an address - -Usage: - claim_send_to_address ( | --claim_id=) - (
| --address=
) - [ | --amount=] - - -Options: - --claim_id= : (str) claim_id to send - --address=
: (str) address to send the claim to - --amount : (int) Amount of credits to claim name for, defaults to the current amount - on the claim - -Returns: - (dict) Dictionary containing result of the claim - { - 'tx' : (str) hex encoded transaction - 'txid' : (str) txid of resulting claim - 'nout' : (int) nout of the resulting claim - 'fee' : (float) fee paid for the claim transaction - 'claim_id' : (str) claim ID of the resulting claim - } -``` - -## claim_show - -```text -Resolve claim info from txid/nout or with claim ID - -Usage: - claim_show [ | --txid=] [ | --nout=] - [ | --claim_id=] - - -Options: - --txid= : (str) look for claim with this txid, nout must - also be specified - --nout= : (int) look for claim with this nout, txid must - also be specified - --claim_id= : (str) look for claim with this claim id - -Returns: - (dict) Dictionary containing claim info as below, - - { - 'txid': (str) txid of claim - 'nout': (int) nout of claim - 'amount': (float) amount of claim - 'value': (str) value of claim - 'height' : (int) height of claim takeover - 'claim_id': (str) claim ID of claim - 'supports': (list) list of supports associated with claim - } - - if claim cannot be resolved, dictionary as below will be returned - - { - 'error': (str) reason for error - } -``` - -## cli_test_command - -```text -This command is only for testing the CLI argument parsing -Usage: - cli_test_command [--a_arg] [--b_arg] ( | --pos_arg=) - [...] [--pos_arg2=] - [--pos_arg3=] - - -Options: - --a_arg : a arg - --b_arg : b arg - --pos_arg= : pos arg - --pos_args= : pos args - --pos_arg2= : pos arg 2 - --pos_arg3= : pos arg 3 - -Returns: - pos args -``` - -## commands - -```text -Return a list of available commands - -Usage: - commands - - -Options: - None - -Returns: - (list) list of available commands -``` - -## daemon_stop - -```text -Stop lbrynet-daemon - -Usage: - daemon_stop - - -Options: - None - -Returns: - (string) Shutdown message -``` - -## file_delete - -```text -Delete a LBRY file - -Usage: - file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=] [--file_name=] - [--stream_hash=] [--rowid=] [--claim_id=] [--txid=] - [--nout=] [--claim_name=] [--channel_claim_id=] - [--channel_name=] - - -Options: - --delete_from_download_dir : (bool) delete file from download directory, - instead of just deleting blobs - --delete_all : (bool) if there are multiple matching files, - allow the deletion of multiple files. - Otherwise do not delete anything. 
- --sd_hash= : (str) delete by file sd hash - --file_name : (str) delete by file name in downloads folder - --stream_hash= : (str) delete by file stream hash - --rowid= : (int) delete by file row id - --claim_id= : (str) delete by file claim id - --txid= : (str) delete by file claim txid - --nout= : (int) delete by file claim nout - --claim_name= : (str) delete by file claim name - --channel_claim_id= : (str) delete by file channel claim id - --channel_name= : (str) delete by file channel claim name - -Returns: - (bool) true if deletion was successful -``` - -## file_list - -```text -List files limited by optional filters - -Usage: - file_list [--sd_hash=] [--file_name=] [--stream_hash=] - [--rowid=] [--claim_id=] [--outpoint=] [--txid=] [--nout=] - [--channel_claim_id=] [--channel_name=] - [--claim_name=] [--full_status] - - -Options: - --sd_hash= : (str) get file with matching sd hash - --file_name= : (str) get file with matching file name in the - downloads folder - --stream_hash= : (str) get file with matching stream hash - --rowid= : (int) get file with matching row id - --claim_id= : (str) get file with matching claim id - --outpoint= : (str) get file with matching claim outpoint - --txid= : (str) get file with matching claim txid - --nout= : (int) get file with matching claim nout - --channel_claim_id= : (str) get file with matching channel claim id - --channel_name= : (str) get file with matching channel name - --claim_name= : (str) get file with matching claim name - --full_status : (bool) full status, populate the - 'message' and 'size' fields - -Returns: - (list) List of files - - [ - { - 'completed': (bool) true if download is completed, - 'file_name': (str) name of file, - 'download_directory': (str) download directory, - 'points_paid': (float) credit paid to download file, - 'stopped': (bool) true if download is stopped, - 'stream_hash': (str) stream hash of file, - 'stream_name': (str) stream name , - 'suggested_file_name': (str) suggested file name, - 'sd_hash': (str) sd hash of file, - 'download_path': (str) download path of file, - 'mime_type': (str) mime type of file, - 'key': (str) key attached to file, - 'total_bytes': (int) file size in bytes, None if full_status is false, - 'written_bytes': (int) written size in bytes, - 'blobs_completed': (int) num_completed, None if full_status is false, - 'blobs_in_stream': (int) None if full_status is false, - 'status': (str) downloader status, None if full_status is false, - 'claim_id': (str) None if full_status is false or if claim is not found, - 'outpoint': (str) None if full_status is false or if claim is not found, - 'txid': (str) None if full_status is false or if claim is not found, - 'nout': (int) None if full_status is false or if claim is not found, - 'metadata': (dict) None if full_status is false or if claim is not found, - 'channel_claim_id': (str) None if full_status is false or if claim is not found or signed, - 'channel_name': (str) None if full_status is false or if claim is not found or signed, - 'claim_name': (str) None if full_status is false or if claim is not found - }, - ] -``` - -## file_reflect - -```text -Reflect all the blobs in a file matching the filter criteria - -Usage: - file_reflect [--sd_hash=] [--file_name=] - [--stream_hash=] [--rowid=] - [--reflector=] - - -Options: - --sd_hash= : (str) get file with matching sd hash - --file_name= : (str) get file with matching file name in the - downloads folder - --stream_hash= : (str) get file with matching stream hash - --rowid= : (int) get file with 
matching row id - --reflector= : (str) reflector server, ip address or url - by default choose a server from the config - -Returns: - (list) list of blobs reflected -``` - -## file_set_status - -```text -Start or stop downloading a file - -Usage: - file_set_status ( | --status=) [--sd_hash=] - [--file_name=] [--stream_hash=] [--rowid=] - - -Options: - --status= : (str) one of "start" or "stop" - --sd_hash= : (str) set status of file with matching sd hash - --file_name= : (str) set status of file with matching file name in the - downloads folder - --stream_hash= : (str) set status of file with matching stream hash - --rowid= : (int) set status of file with matching row id - -Returns: - (str) Confirmation message -``` - -## get - -```text -Download stream from a LBRY name. - -Usage: - get [ | --file_name=] [ | --timeout=] - - - -Options: - --uri= : (str) uri of the content to download - --file_name= : (str) specified name for the downloaded file - --timeout= : (int) download timeout in number of seconds - -Returns: - (dict) Dictionary containing information about the stream - { - 'completed': (bool) true if download is completed, - 'file_name': (str) name of file, - 'download_directory': (str) download directory, - 'points_paid': (float) credit paid to download file, - 'stopped': (bool) true if download is stopped, - 'stream_hash': (str) stream hash of file, - 'stream_name': (str) stream name , - 'suggested_file_name': (str) suggested file name, - 'sd_hash': (str) sd hash of file, - 'download_path': (str) download path of file, - 'mime_type': (str) mime type of file, - 'key': (str) key attached to file, - 'total_bytes': (int) file size in bytes, None if full_status is false, - 'written_bytes': (int) written size in bytes, - 'blobs_completed': (int) num_completed, None if full_status is false, - 'blobs_in_stream': (int) None if full_status is false, - 'status': (str) downloader status, None if full_status is false, - 'claim_id': (str) claim id, - 'outpoint': (str) claim outpoint string, - 'txid': (str) claim txid, - 'nout': (int) claim nout, - 'metadata': (dict) claim metadata, - 'channel_claim_id': (str) None if claim is not signed - 'channel_name': (str) None if claim is not signed - 'claim_name': (str) claim name - } -``` - -## help - -```text -Return a useful message for an API command - -Usage: - help [ | --command=] - - -Options: - --command= : (str) command to retrieve documentation for - -Returns: - (str) Help message -``` - -## peer_list - -```text -Get peers for blob hash - -Usage: - peer_list ( | --blob_hash=) [ | --timeout=] - - -Options: - --blob_hash= : (str) find available peers for this blob hash - --timeout= : (int) peer search timeout in seconds - -Returns: - (list) List of contacts -``` - -## publish - -```text -Make a new name claim and publish associated data to lbrynet, -update over existing claim if user already has a claim for name. - -Fields required in the final Metadata are: - 'title' - 'description' - 'author' - 'language' - 'license' - 'nsfw' - -Metadata can be set by either using the metadata argument or by setting individual arguments -fee, title, description, author, language, license, license_url, thumbnail, preview, nsfw, -or sources. Individual arguments will overwrite the fields specified in metadata argument. 
- -Usage: - publish ( | --name=) ( | --bid=) [--metadata=] - [--file_path=] [--fee=] [--title=] - [--description=<description>] [--author=<author>] [--language=<language>] - [--license=<license>] [--license_url=<license_url>] [--thumbnail=<thumbnail>] - [--preview=<preview>] [--nsfw=<nsfw>] [--sources=<sources>] - [--channel_name=<channel_name>] [--channel_id=<channel_id>] - [--claim_address=<claim_address>] [--change_address=<change_address>] - - -Options: - --name=<name> : (str) name of the content - --bid=<bid> : (float) amount to back the claim - --metadata=<metadata> : (dict) ClaimDict to associate with the claim. - --file_path=<file_path> : (str) path to file to be associated with name. If provided, - a lbry stream of this file will be used in 'sources'. - If no path is given but a sources dict is provided, - it will be used. If neither are provided, an - error is raised. - --fee=<fee> : (dict) Dictionary representing key fee to download content: - { - 'currency': currency_symbol, - 'amount': float, - 'address': str, optional - } - supported currencies: LBC, USD, BTC - If an address is not provided a new one will be - automatically generated. Default fee is zero. - --title=<title> : (str) title of the publication - --description=<description> : (str) description of the publication - --author=<author> : (str) author of the publication - --language=<language> : (str) language of the publication - --license=<license> : (str) publication license - --license_url=<license_url> : (str) publication license url - --thumbnail=<thumbnail> : (str) thumbnail url - --preview=<preview> : (str) preview url - --nsfw=<nsfw> : (bool) title of the publication - --sources=<sources> : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file - --channel_name=<channel_name> : (str) name of the publisher channel name in the wallet - --channel_id=<channel_id> : (str) claim id of the publisher channel, does not check - for channel claim being in the wallet. This allows - publishing to a channel where only the certificate - private key is in the wallet. - --claim_address=<claim_address> : (str) address where the claim is sent to, if not specified - new address wil automatically be created - -Returns: - (dict) Dictionary containing result of the claim - { - 'tx' : (str) hex encoded transaction - 'txid' : (str) txid of resulting claim - 'nout' : (int) nout of the resulting claim - 'fee' : (float) fee paid for the claim transaction - 'claim_id' : (str) claim ID of the resulting claim - } -``` - -## report_bug - -```text -Report a bug to slack - -Usage: - report_bug (<message> | --message=<message>) - - -Options: - --message=<message> : (str) Description of the bug - -Returns: - (bool) true if successful -``` - -## resolve - -```text -Resolve given LBRY URIs - -Usage: - resolve [--force] (<uri> | --uri=<uri>) [<uris>...] 
- - -Options: - --force : (bool) force refresh and ignore cache - --uri=<uri> : (str) uri to resolve - --uris=<uris> : (list) uris to resolve - -Returns: - Dictionary of results, keyed by uri - '<uri>': { - If a resolution error occurs: - 'error': Error message - - If the uri resolves to a channel or a claim in a channel: - 'certificate': { - 'address': (str) claim address, - 'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number, - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'permanent_url': (str) permanent url of the certificate claim, - 'supports: (list) list of supports [{'txid': (str) txid, - 'nout': (int) nout, - 'amount': (float) amount}], - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - - If the uri resolves to a channel: - 'claims_in_channel': (int) number of claims in the channel, - - If the uri resolves to a claim: - 'claim': { - 'address': (str) claim address, - 'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number, - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'permanent_url': (str) permanent url of the claim, - 'channel_name': (str) channel name if claim is in a channel - 'supports: (list) list of supports [{'txid': (str) txid, - 'nout': (int) nout, - 'amount': (float) amount}] - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - } -``` - -## resolve_name - -```text -Resolve stream info from a LBRY name - -Usage: - resolve_name (<name> | --name=<name>) [--force] - - -Options: - --name=<name> : (str) the name to resolve - --force : (bool) force refresh and do not check cache - -Returns: - (dict) Metadata dictionary from name claim, None if the name is not - resolvable -``` - -## routing_table_get - -```text -Get DHT routing information - -Usage: - routing_table_get - - -Options: - None - -Returns: - (dict) dictionary containing routing and contact information - { - "buckets": { - <bucket index>: [ - { - "address": (str) peer address, - "node_id": (str) peer node id, - "blobs": (list) blob hashes announced by peer - } - ] - }, - "contacts": (list) contact node ids, - "blob_hashes": (list) all of the blob hashes stored by peers in the list of buckets, - "node_id": (str) the local dht node id - } -``` - -## settings_get - -```text -Get daemon settings - -Usage: - settings_get - - -Options: - None - -Returns: - (dict) Dictionary of daemon settings - See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings -``` - -## settings_set - -```text -Set daemon settings - -Usage: - settings_set [--download_directory=<download_directory>] - [--data_rate=<data_rate>] - [--download_timeout=<download_timeout>] - [--peer_port=<peer_port>] - [--max_key_fee=<max_key_fee>] - [--disable_max_key_fee=<disable_max_key_fee>] - [--use_upnp=<use_upnp>] - 
[--run_reflector_server=<run_reflector_server>] - [--cache_time=<cache_time>] - [--reflect_uploads=<reflect_uploads>] - [--share_usage_data=<share_usage_data>] - [--peer_search_timeout=<peer_search_timeout>] - [--sd_download_timeout=<sd_download_timeout>] - [--auto_renew_claim_height_delta=<auto_renew_claim_height_delta>] - - -Options: - --download_directory=<download_directory> : (str) path of download directory - --data_rate=<data_rate> : (float) 0.0001 - --download_timeout=<download_timeout> : (int) 180 - --peer_port=<peer_port> : (int) 3333 - --max_key_fee=<max_key_fee> : (dict) maximum key fee for downloads, - in the format: - { - 'currency': <currency_symbol>, - 'amount': <amount> - }. - In the CLI, it must be an escaped JSON string - Supported currency symbols: LBC, USD, BTC - --disable_max_key_fee=<disable_max_key_fee> : (bool) False - --use_upnp=<use_upnp> : (bool) True - --run_reflector_server=<run_reflector_server> : (bool) False - --cache_time=<cache_time> : (int) 150 - --reflect_uploads=<reflect_uploads> : (bool) True - --share_usage_data=<share_usage_data> : (bool) True - --peer_search_timeout=<peer_search_timeout> : (int) 3 - --sd_download_timeout=<sd_download_timeout> : (int) 3 - --auto_renew_claim_height_delta=<auto_renew_claim_height_delta> : (int) 0 - claims set to expire within this many blocks will be - automatically renewed after startup (if set to 0, renews - will not be made automatically) - -Returns: - (dict) Updated dictionary of daemon settings -``` - -## status - -```text -Get daemon status - -Usage: - status [--session_status] [--dht_status] - - -Options: - --session_status : (bool) include session status in results - --dht_status : (bool) include dht network and peer status - -Returns: - (dict) lbrynet-daemon status - { - 'lbry_id': lbry peer id, base58, - 'installation_id': installation id, base58, - 'is_running': bool, - 'is_first_run': bool, - 'startup_status': { - 'code': status code, - 'message': status message - }, - 'connection_status': { - 'code': connection status code, - 'message': connection status message - }, - 'blockchain_status': { - 'blocks': local blockchain height, - 'blocks_behind': remote_height - local_height, - 'best_blockhash': block hash of most recent block, - }, - 'wallet_is_encrypted': bool, - - If given the session status option: - 'session_status': { - 'managed_blobs': count of blobs in the blob manager, - 'managed_streams': count of streams in the file manager - 'announce_queue_size': number of blobs currently queued to be announced - 'should_announce_blobs': number of blobs that should be announced - } - - If given the dht status option: - 'dht_status': { - 'kbps_received': current kbps receiving, - 'kbps_sent': current kdps being sent, - 'total_bytes_sent': total bytes sent, - 'total_bytes_received': total bytes received, - 'queries_received': number of queries received per second, - 'queries_sent': number of queries sent per second, - 'recent_contacts': count of recently contacted peers, - 'unique_contacts': count of unique peers - }, - } -``` - -## stream_availability - -```text -Get stream availability for lbry uri - -Usage: - stream_availability (<uri> | --uri=<uri>) - [<search_timeout> | --search_timeout=<search_timeout>] - [<blob_timeout> | --blob_timeout=<blob_timeout>] - - -Options: - --uri=<uri> : (str) check availability for this uri - --search_timeout=<search_timeout> : (int) how long to search for peers for the blob - in the dht - --search_timeout=<blob_timeout> : (int) how long to try downloading from a peer - 
-Returns: - (dict) { - 'is_available': <bool>, - 'did_decode': <bool>, - 'did_resolve': <bool>, - 'is_stream': <bool>, - 'num_blobs_in_stream': <int>, - 'sd_hash': <str>, - 'sd_blob_availability': <dict> see `blob_availability`, - 'head_blob_hash': <str>, - 'head_blob_availability': <dict> see `blob_availability`, - 'use_upnp': <bool>, - 'upnp_redirect_is_set': <bool>, - 'error': <None> | <str> error message - } -``` - -## stream_cost_estimate - -```text -Get estimated cost for a lbry stream - -Usage: - stream_cost_estimate (<uri> | --uri=<uri>) [<size> | --size=<size>] - - -Options: - --uri=<uri> : (str) uri to use - --size=<size> : (float) stream size in bytes. if provided an sd blob won't be - downloaded. - -Returns: - (float) Estimated cost in lbry credits, returns None if uri is not - resolvable -``` - -## transaction_list - -```text -List transactions belonging to wallet - -Usage: - transaction_list - - -Options: - None - -Returns: - (list) List of transactions - - { - "claim_info": (list) claim info if in txn [{ - "address": (str) address of claim, - "balance_delta": (float) bid amount, - "amount": (float) claim amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "abandon_info": (list) abandon info if in txn [{ - "address": (str) address of abandoned claim, - "balance_delta": (float) returned amount, - "amount": (float) claim amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "confirmations": (int) number of confirmations for the txn, - "date": (str) date and time of txn, - "fee": (float) txn fee, - "support_info": (list) support info if in txn [{ - "address": (str) address of support, - "balance_delta": (float) support amount, - "amount": (float) support amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "is_tip": (bool), - "nout": (int) nout - }], - "timestamp": (int) timestamp, - "txid": (str) txn id, - "update_info": (list) update info if in txn [{ - "address": (str) address of claim, - "balance_delta": (float) credited/debited - "amount": (float) absolute amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "value": (float) value of txn - } -``` - -## transaction_show - -```text -Get a decoded transaction from a txid - -Usage: - transaction_show (<txid> | --txid=<txid>) - - -Options: - --txid=<txid> : (str) txid of the transaction - -Returns: - (dict) JSON formatted transaction -``` - -## utxo_list - -```text -List unspent transaction outputs - -Usage: - utxo_list - - -Options: - None - -Returns: - (list) List of unspent transaction outputs (UTXOs) - [ - { - "address": (str) the output address - "amount": (float) unspent amount - "height": (int) block height - "is_claim": (bool) is the tx a claim - "is_coinbase": (bool) is the tx a coinbase tx - "is_support": (bool) is the tx a support - "is_update": (bool) is the tx an update - "nout": (int) nout of the output - "txid": (str) txid of the output - }, - ... - ] -``` - -## version - -```text -Get lbry version information - -Usage: - version - - -Options: - None - -Returns: - (dict) Dictionary of lbry version information - { - 'build': (str) build type (e.g. 
"dev", "rc", "release"), - 'ip': (str) remote ip, if available, - 'lbrynet_version': (str) lbrynet_version, - 'lbryum_version': (str) lbryum_version, - 'lbryschema_version': (str) lbryschema_version, - 'os_release': (str) os release string - 'os_system': (str) os name - 'platform': (str) platform string - 'processor': (str) processor type, - 'python_version': (str) python version, - } -``` - -## wallet_balance - -```text -Return the balance of the wallet - -Usage: - wallet_balance [<address> | --address=<address>] [--include_unconfirmed] - - -Options: - --address=<address> : (str) If provided only the balance for this - address will be given - --include_unconfirmed : (bool) Include unconfirmed - -Returns: - (float) amount of lbry credits in wallet -``` - -## wallet_decrypt - -```text -Decrypt an encrypted wallet, this will remove the wallet password - -Usage: - wallet_decrypt - - -Options: - None - -Returns: - (bool) true if wallet is decrypted, otherwise false -``` - -## wallet_encrypt - -```text -Encrypt a wallet with a password, if the wallet is already encrypted this will update -the password - -Usage: - wallet_encrypt (<new_password> | --new_password=<new_password>) - - -Options: - --new_password=<new_password> : (str) password string to be used for encrypting wallet - -Returns: - (bool) true if wallet is decrypted, otherwise false -``` - -## wallet_is_address_mine - -```text -Checks if an address is associated with the current wallet. - -Usage: - wallet_is_address_mine (<address> | --address=<address>) - - -Options: - --address=<address> : (str) address to check - -Returns: - (bool) true, if address is associated with current wallet -``` - -## wallet_list - -```text -List wallet addresses - -Usage: - wallet_list - - -Options: - None - -Returns: - List of wallet addresses -``` - -## wallet_new_address - -```text -Generate a new wallet address - -Usage: - wallet_new_address - - -Options: - None - -Returns: - (str) New wallet address in base58 -``` - -## wallet_prefill_addresses - -```text -Create new addresses, each containing `amount` credits - -Usage: - wallet_prefill_addresses [--no_broadcast] - (<num_addresses> | --num_addresses=<num_addresses>) - (<amount> | --amount=<amount>) - - -Options: - --no_broadcast : (bool) whether to broadcast or not - --num_addresses=<num_addresses> : (int) num of addresses to create - --amount=<amount> : (float) initial amount in each address - -Returns: - (dict) the resulting transaction -``` - -## wallet_public_key - -```text -Get public key from wallet address - -Usage: - wallet_public_key (<address> | --address=<address>) - - -Options: - --address=<address> : (str) address for which to get the public key - -Returns: - (list) list of public keys associated with address. - Could contain more than one public key if multisig. -``` - -## wallet_send - -```text -Send credits. If given an address, send credits to it. If given a claim id, send a tip -to the owner of a claim specified by uri. A tip is a claim support where the recipient -of the support is the claim address for the claim being supported. 
- -Usage: - wallet_send (<amount> | --amount=<amount>) - ((<address> | --address=<address>) | (<claim_id> | --claim_id=<claim_id>)) - - -Options: - --amount=<amount> : (float) amount of credit to send - --address=<address> : (str) address to send credits to - --claim_id=<claim_id> : (float) claim_id of the claim to send to tip to - -Returns: - If sending to an address: - (bool) true if payment successfully scheduled - - If sending a claim tip: - (dict) Dictionary containing the result of the support - { - txid : (str) txid of resulting support claim - nout : (int) nout of the resulting support claim - fee : (float) fee paid for the transaction - } -``` - -## wallet_unlock - -```text -Unlock an encrypted wallet - -Usage: - wallet_unlock (<password> | --password=<password>) - - -Options: - --password=<password> : (str) password for unlocking wallet - -Returns: - (bool) true if wallet is unlocked, otherwise false -``` - -## wallet_unused_address - -```text -Return an address containing no balance, will create -a new address if there is none. - -Usage: - wallet_unused_address - - -Options: - None - -Returns: - (str) Unused wallet address in base58 -``` - diff --git a/docs/cli/index.html b/docs/cli/index.html new file mode 100644 index 000000000..73494cdc8 --- /dev/null +++ b/docs/cli/index.html @@ -0,0 +1,2530 @@ + + + + +<!DOCTYPE html> +<html lang="en" class="no-js"> + <head> + + <meta charset="utf-8"> + <meta name="viewport" content="width=device-width,initial-scale=1"> + <meta http-equiv="x-ua-compatible" content="ie=edge"> + + + + + <meta name="lang:clipboard.copy" content="Copy to clipboard"> + + <meta name="lang:clipboard.copied" content="Copied to clipboard"> + + <meta name="lang:search.language" content="en"> + + <meta name="lang:search.pipeline.stopwords" content="True"> + + <meta name="lang:search.pipeline.trimmer" content="True"> + + <meta name="lang:search.result.none" content="No matching documents"> + + <meta name="lang:search.result.one" content="1 matching document"> + + <meta name="lang:search.result.other" content="# matching documents"> + + <meta name="lang:search.tokenizer" content="[\s\-]+"> + + <link rel="shortcut icon" href="../assets/images/favicon.png"> + <meta name="generator" content="mkdocs-0.17.2, mkdocs-material-2.6.6"> + + + + <title>CLI - LBRY + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content + + + +
+LBRY Command Line Documentation
+
+blob_announce
+
+Announce blobs to the DHT
+
+Usage:
+    blob_announce [--announce_all] [<blob_hash> | --blob_hash=<blob_hash>]
+                  [<stream_hash> | --stream_hash=<stream_hash>]
+                  [<sd_hash> | --sd_hash=<sd_hash>]
+
+
+Options:
+    --announce_all=<announce_all>  :  (bool)  announce all the blobs possessed by user
+    --blob_hash=<blob_hash>        :  (str)   announce a blob, specified by blob_hash
+    --stream_hash=<stream_hash>    :  (str)   announce all blobs associated with
+                                              stream_hash
+    --sd_hash=<sd_hash>            :  (str)   announce all blobs associated with
+                                              sd_hash and the sd_hash itself
+
+Returns:
+    (bool) true if successful
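
As a worked illustration of the call above, here is a minimal Python sketch that announces a blob through the daemon's JSON-RPC API. The endpoint URL, the request/response envelope, and the blob hash are assumptions for illustration, not taken from this documentation; adjust them to your configuration.

```python
import requests

API_URL = "http://localhost:5279"  # assumed default lbrynet-daemon API address


def api_call(method, **params):
    """POST a JSON-RPC style request to the daemon and return its 'result' field (assumed envelope)."""
    response = requests.post(API_URL, json={"method": method, "params": params})
    response.raise_for_status()
    payload = response.json()
    if payload.get("error"):
        raise RuntimeError(payload["error"])
    return payload["result"]


# Announce a single blob by hash; the 96-character value here is only a placeholder.
announced = api_call("blob_announce", blob_hash="a" * 96)
print("announced:", announced)  # the docstring above says this is a bool
```
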
+
+blob_availability
+
+Get blob availability
+
+Usage:
+    blob_availability (<blob_hash>) [<search_timeout> | --search_timeout=<search_timeout>]
+                      [<blob_timeout> | --blob_timeout=<blob_timeout>]
+
+
+Options:
+    --blob_hash=<blob_hash>            :  (str)  check availability for this blob hash
+    --search_timeout=<search_timeout>  :  (int)  how long to search for peers for the blob
+                                                 in the dht
+    --blob_timeout=<blob_timeout>      :  (int)  how long to try downloading from a peer
+
+Returns:
+    (dict) {
+        "is_available": <bool, true if the blob is available from a peer in the peer list>
+        "reachable_peers": ["<ip>:<port>"],
+        "unreachable_peers": ["<ip>:<port>"]
+    }
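
A hedged sketch of using `blob_availability` to decide whether a blob is worth requesting, reusing the same assumed JSON-RPC helper and a placeholder hash.

```python
import requests


def api_call(method, **params):
    # Same assumed JSON-RPC helper as in the blob_announce example above.
    resp = requests.post("http://localhost:5279", json={"method": method, "params": params})
    resp.raise_for_status()
    return resp.json()["result"]


# Check a placeholder blob hash with a short DHT search timeout.
availability = api_call("blob_availability", blob_hash="a" * 96, search_timeout=5)
if availability["is_available"]:
    print("reachable peers:", availability["reachable_peers"])
else:
    print("unreachable peers:", availability["unreachable_peers"])
```
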
+
+blob_delete
+
+Delete a blob
+
+Usage:
+    blob_delete (<blob_hash> | --blob_hash=<blob_hash>)
+
+
+Options:
+    --blob_hash=<blob_hash>  :  (str)  blob hash of the blob to delete
+
+Returns:
+    (str) Success/fail message
+
+blob_get
+
+Download and return a blob
+
+Usage:
+    blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>]
+             [--encoding=<encoding>] [--payment_rate_manager=<payment_rate_manager>]
+
+
+Options:
+    --blob_hash=<blob_hash>                        :  (str)  blob hash of the blob to get
+    --timeout=<timeout>                            :  (int)  timeout in number of seconds
+    --encoding=<encoding>                          :  (str)  by default no attempt at decoding
+                                                             is made, can be set to one of the
+                                                             following decoders:
+                                                             'json'
+    --payment_rate_manager=<payment_rate_manager>  :  (str)  if not given the default payment rate
+                                                             manager will be used.
+                                                             supported alternative rate managers:
+                                                             'only-free'
+
+Returns:
+    (str) Success/Fail message or (dict) decoded data
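
If the hash refers to an sd blob, the `encoding` option lets the daemon decode it for you. A small sketch under the same endpoint and helper assumptions, with a placeholder hash:

```python
import requests


def api_call(method, **params):
    # Same assumed JSON-RPC helper as in the blob_announce example above.
    resp = requests.post("http://localhost:5279", json={"method": method, "params": params})
    resp.raise_for_status()
    return resp.json()["result"]


# Ask for the blob to be decoded as JSON (useful when the hash is an sd hash).
decoded = api_call("blob_get", blob_hash="a" * 96, encoding="json", timeout=30)
print(type(decoded))  # a dict when decoding succeeds, per the docstring above
```
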
+
+blob_list
+
+Returns blob hashes. If no filters are given, returns all blobs known by the blob manager
+
+Usage:
+    blob_list [--needed] [--finished] [<uri> | --uri=<uri>]
+              [<stream_hash> | --stream_hash=<stream_hash>]
+              [<sd_hash> | --sd_hash=<sd_hash>]
+              [<page_size> | --page_size=<page_size>]
+              [<page> | --page=<page>]
+
+
+Options:
+    --needed                     :  (bool)  only return needed blobs
+    --finished                   :  (bool)  only return finished blobs
+    --uri=<uri>                  :  (str)   filter blobs by stream in a uri
+    --stream_hash=<stream_hash>  :  (str)   filter blobs by stream hash
+    --sd_hash=<sd_hash>          :  (str)   filter blobs by sd hash
+    --page_size=<page_size>      :  (int)   results page size
+    --page=<page>                :  (int)   page of results to return
+
+Returns:
+    (list) List of blob hashes
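
A sketch of `blob_list` with a filter and paging, again assuming the JSON-RPC helper from the earlier examples and a placeholder `lbry://` uri:

```python
import requests


def api_call(method, **params):
    # Same assumed JSON-RPC helper as in the blob_announce example above.
    resp = requests.post("http://localhost:5279", json={"method": method, "params": params})
    resp.raise_for_status()
    return resp.json()["result"]


# First page of blobs still needed for a stream, selected by a placeholder uri.
needed = api_call("blob_list", needed=True, uri="lbry://example-claim", page=1, page_size=50)
print(len(needed), "blobs still needed")
```
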
+
+blob_reflect_all
+
+Reflects all saved blobs
+
+Usage:
+    blob_reflect_all
+
+
+Options:
+          None
+
+Returns:
+    (bool) true if successful
+
+block_show
+
+Get contents of a block
+
+Usage:
+    block_show (<blockhash> | --blockhash=<blockhash>) | (<height> | --height=<height>)
+
+
+Options:
+    --blockhash=<blockhash>  :  (str)  hash of the block to look up
+    --height=<height>        :  (int)  height of the block to look up
+
+Returns:
+    (dict) Requested block
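
Since `block_show` accepts either a block hash or a height, a sketch like the following (same assumed helper) can look a block up by height and inspect whatever fields come back without presuming their names:

```python
import requests


def api_call(method, **params):
    # Same assumed JSON-RPC helper as in the blob_announce example above.
    resp = requests.post("http://localhost:5279", json={"method": method, "params": params})
    resp.raise_for_status()
    return resp.json()["result"]


# Look a block up by height and list the fields the daemon returned.
block = api_call("block_show", height=400000)
print(sorted(block.keys()))
```
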
+
+channel_export
+
+Export serialized channel signing information for a given certificate claim id
+
+Usage:
+    channel_export (<claim_id> | --claim_id=<claim_id>)
+
+
+Options:
+    --claim_id=<claim_id>  :  (str)  Claim ID to export information about
+
+Returns:
+    (str) Serialized certificate information
+
+channel_import
+
+Import serialized channel signing information (to allow signing new claims to the channel)
+
+Usage:
+    channel_import (<serialized_certificate_info> |
+                    --serialized_certificate_info=<serialized_certificate_info>)
+
+
+Options:
+    --serialized_certificate_info=<serialized_certificate_info>  :  (str)  certificate info
+
+Returns:
+    (dict) Result dictionary
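
A sketch of the export/import round trip under the same assumptions; in practice the `channel_import` call would be issued to a different daemon than the one that served `channel_export`, and the claim id below is a placeholder:

```python
import requests


def api_call(method, **params):
    # Same assumed JSON-RPC helper as in the blob_announce example above.
    resp = requests.post("http://localhost:5279", json={"method": method, "params": params})
    resp.raise_for_status()
    return resp.json()["result"]


# Export the signing information for a channel certificate (placeholder claim id)...
exported = api_call("channel_export", claim_id="0123abcd" * 5)
# ...and import it; normally this second call targets a different daemon/wallet.
print(api_call("channel_import", serialized_certificate_info=exported))
```
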
+
+channel_list
+
+Get certificate claim infos for channels that can be published to
+
+Usage:
+    channel_list
+
+
+Options:
+          None
+
+Returns:
+    (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim
+    is in the wallet.
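
A sketch that filters the `channel_list` result down to certificates actually held by the wallet, using the documented `is_mine` field (same assumed helper):

```python
import requests


def api_call(method, **params):
    # Same assumed JSON-RPC helper as in the blob_announce example above.
    resp = requests.post("http://localhost:5279", json={"method": method, "params": params})
    resp.raise_for_status()
    return resp.json()["result"]


channels = api_call("channel_list")
mine = [c for c in channels if c.get("is_mine")]
print(len(mine), "of", len(channels), "channel certificates are in this wallet")
```
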
+
+channel_new
+
+Generate a publisher key and create a new '@' prefixed certificate claim
+
+Usage:
+    channel_new (<channel_name> | --channel_name=<channel_name>)
+                (<amount> | --amount=<amount>)
+
+
+Options:
+    --channel_name=<channel_name>  :  (str)    name of the channel prefixed with '@'
+    --amount=<amount>              :  (float)  bid amount on the channel
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        'tx' : (str) hex encoded transaction
+        'txid' : (str) txid of resulting claim
+        'nout' : (int) nout of the resulting claim
+        'fee' : (float) fee paid for the claim transaction
+        'claim_id' : (str) claim ID of the resulting claim
+    }
+
+ +

claim_abandon

+
Abandon a name and reclaim credits from the claim
+
+Usage:
+    claim_abandon [<claim_id> | --claim_id=<claim_id>]
+                  [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
+
+
+Options:
+    --claim_id=<claim_id>  :  (str)  claim_id of the claim to abandon
+    --txid=<txid>          :  (str)  txid of the claim to abandon
+    --nout=<nout>          :  (int)  nout of the claim to abandon
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        txid : (str) txid of resulting transaction
+        fee : (float) fee paid for the transaction
+    }
+
+ +

claim_list

+
List current claims and information about them for a given name
+
+Usage:
+    claim_list (<name> | --name=<name>)
+
+
+Options:
+    --name=<name>  :  (str)  name of the claim to list info about
+
+Returns:
+    (dict) State of claims assigned for the name
+    {
+        'claims': (list) list of claims for the name
+        [
+            {
+            'amount': (float) amount assigned to the claim
+            'effective_amount': (float) total amount assigned to the claim,
+                                including supports
+            'claim_id': (str) claim ID of the claim
+            'height': (int) height of block containing the claim
+            'txid': (str) txid of the claim
+            'nout': (int) nout of the claim
+            'permanent_url': (str) permanent url of the claim,
+            'supports': (list) a list of supports attached to the claim
+            'value': (str) the value of the claim
+            },
+        ]
+        'supports_without_claims': (list) supports without any claims attached to them
+        'last_takeover_height': (int) the height of last takeover for the name
+    }
+
+ +

claim_list_by_channel

+
Get paginated claims in a channel specified by a channel uri
+
+Usage:
+    claim_list_by_channel (<uri> | --uri=<uri>) [<uris>...] [--page=<page>]
+                           [--page_size=<page_size>]
+
+
+Options:
+    --uri=<uri>              :  (str)   uri of the channel
+    --uris=<uris>            :  (list)  uris of the channel
+    --page=<page>            :  (int)   which page of results to return where page 1 is the first
+                                        page, defaults to no pages
+    --page_size=<page_size>  :  (int)   number of results in a page, default of 10
+
+Returns:
+    {
+         resolved channel uri: {
+            If there was an error:
+            'error': (str) error message
+
+            'claims_in_channel': the total number of results for the channel,
+
+            If a page of results was requested:
+            'returned_page': page number returned,
+            'claims_in_channel': [
+                {
+                    'absolute_channel_position': (int) claim index number in sorted list of
+                                                 claims which assert to be part of the
+                                                 channel
+                    'address': (str) claim address,
+                    'amount': (float) claim amount,
+                    'effective_amount': (float) claim amount including supports,
+                    'claim_id': (str) claim id,
+                    'claim_sequence': (int) claim sequence number,
+                    'decoded_claim': (bool) whether or not the claim value was decoded,
+                    'height': (int) claim height,
+                    'depth': (int) claim depth,
+                    'has_signature': (bool) included if decoded_claim
+                    'name': (str) claim name,
+                    'supports': (list) list of supports [{'txid': (str) txid,
+                                                         'nout': (int) nout,
+                                                         'amount': (float) amount}],
+                    'txid': (str) claim txid,
+                    'nout': (str) claim nout,
+                    'signature_is_valid': (bool), included if has_signature,
+                    'value': ClaimDict if decoded, otherwise hex string
+                }
+            ],
+        }
+    }
+
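Since the result is keyed by the resolved channel uri and its shape depends on whether a page was requested, callers generally unpack it in two steps. A hedged sketch with the same assumed endpoint and `result` envelope as the earlier examples, and a placeholder channel uri:

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address

result = requests.post(API_URL, json={
    "method": "claim_list_by_channel",
    "params": {"uri": "@example-channel", "page": 1, "page_size": 10},  # placeholder uri
}).json().get("result", {})

for resolved_uri, channel in result.items():
    if "error" in channel:
        print(resolved_uri, "error:", channel["error"])
        continue
    # with a page requested, claims_in_channel is the list of claims on that page
    for claim in channel.get("claims_in_channel", []):
        print(claim["name"], claim["claim_id"], claim["amount"])
```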
+ +

claim_list_mine

+
List my name claims
+
+Usage:
+    claim_list_mine
+
+
+Options:
+          None
+
+Returns:
+    (list) List of name claims owned by user
+    [
+        {
+            'address': (str) address that owns the claim
+            'amount': (float) amount assigned to the claim
+            'blocks_to_expiration': (int) number of blocks until it expires
+            'category': (str) "claim", "update" , or "support"
+            'claim_id': (str) claim ID of the claim
+            'confirmations': (int) number of blocks of confirmations for the claim
+            'expiration_height': (int) the block height which the claim will expire
+            'expired': (bool) true if expired, false otherwise
+            'height': (int) height of the block containing the claim
+            'is_spent': (bool) true if claim is abandoned, false otherwise
+            'name': (str) name of the claim
+            'permanent_url': (str) permanent url of the claim,
+            'txid': (str) txid of the claim
+            'nout': (int) nout of the claim
+            'value': (str) value of the claim
+        },
+   ]
+
+ +

claim_new_support

+
Support a name claim
+
+Usage:
+    claim_new_support (<name> | --name=<name>) (<claim_id> | --claim_id=<claim_id>)
+                      (<amount> | --amount=<amount>)
+
+
+Options:
+    --name=<name>          :  (str)    name of the claim to support
+    --claim_id=<claim_id>  :  (str)    claim_id of the claim to support
+    --amount=<amount>      :  (float)  amount of support
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        txid : (str) txid of resulting support claim
+        nout : (int) nout of the resulting support claim
+        fee : (float) fee paid for the transaction
+    }
+
+ +

claim_renew

+
Renew claim(s) or support(s)
+
+Usage:
+    claim_renew (<outpoint> | --outpoint=<outpoint>) | (<height> | --height=<height>)
+
+
+Options:
+    --outpoint=<outpoint>  :  (str)  outpoint of the claim to renew
+    --height=<height>      :  (int)  update claims expiring before or at this block height
+
+Returns:
+    (dict) Dictionary where the key is the original claim's outpoint and
+    value is the result of the renewal
+    {
+        outpoint:{
+
+            'tx' : (str) hex encoded transaction
+            'txid' : (str) txid of resulting claim
+            'nout' : (int) nout of the resulting claim
+            'fee' : (float) fee paid for the claim transaction
+            'claim_id' : (str) claim ID of the resulting claim
+        },
+    }
+
+ +

claim_send_to_address

+
Send a name claim to an address
+
+Usage:
+    claim_send_to_address (<claim_id> | --claim_id=<claim_id>)
+                          (<address> | --address=<address>)
+                          [<amount> | --amount=<amount>]
+
+
+Options:
+    --claim_id=<claim_id>  :  (str)  claim_id to send
+    --address=<address>    :  (str)  address to send the claim to
+    --amount=<amount>      :  (int)  Amount of credits to claim name for, defaults to the current amount
+                                     on the claim
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        'tx' : (str) hex encoded transaction
+        'txid' : (str) txid of resulting claim
+        'nout' : (int) nout of the resulting claim
+        'fee' : (float) fee paid for the claim transaction
+        'claim_id' : (str) claim ID of the resulting claim
+    }
+
+ +

claim_show

+
Resolve claim info from txid/nout or with claim ID
+
+Usage:
+    claim_show [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
+               [<claim_id> | --claim_id=<claim_id>]
+
+
+Options:
+    --txid=<txid>          :  (str)  look for claim with this txid, nout must
+                                     also be specified
+    --nout=<nout>          :  (int)  look for claim with this nout, txid must
+                                     also be specified
+    --claim_id=<claim_id>  :  (str)  look for claim with this claim id
+
+Returns:
+    (dict) Dictionary containing claim info as below,
+
+    {
+        'txid': (str) txid of claim
+        'nout': (int) nout of claim
+        'amount': (float) amount of claim
+        'value': (str) value of claim
+        'height' : (int) height of claim takeover
+        'claim_id': (str) claim ID of claim
+        'supports': (list) list of supports associated with claim
+    }
+
+    if claim cannot be resolved, dictionary as below will be returned
+
+    {
+        'error': (str) reason for error
+    }
+
+ +

cli_test_command

+
This command is only for testing the CLI argument parsing
+
+Usage:
+    cli_test_command [--a_arg] [--b_arg] (<pos_arg> | --pos_arg=<pos_arg>)
+                     [<pos_args>...] [--pos_arg2=<pos_arg2>]
+                     [--pos_arg3=<pos_arg3>]
+
+
+Options:
+    --a_arg                :  a    arg
+    --b_arg                :  b    arg
+    --pos_arg=<pos_arg>    :  pos  arg
+    --pos_args=<pos_args>  :  pos  args
+    --pos_arg2=<pos_arg2>  :  pos  arg 2
+    --pos_arg3=<pos_arg3>  :  pos  arg 3
+
+Returns:
+    pos args
+
+ +

commands

+
Return a list of available commands
+
+Usage:
+    commands
+
+
+Options:
+          None
+
+Returns:
+    (list) list of available commands
+
+ +

daemon_stop

+
Stop lbrynet-daemon
+
+Usage:
+    daemon_stop
+
+
+Options:
+          None
+
+Returns:
+    (string) Shutdown message
+
+ +

file_delete

+
Delete a LBRY file
+
+Usage:
+    file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
+                [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
+                [--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
+                [--channel_name=<channel_name>]
+
+
+Options:
+    --delete_from_download_dir             :  (bool)  delete file from download directory,
+                                                      instead of just deleting blobs
+    --delete_all                           :  (bool)  if there are multiple matching files,
+                                                      allow the deletion of multiple files.
+                                                      Otherwise do not delete anything.
+    --sd_hash=<sd_hash>                    :  (str)   delete by file sd hash
+    --file_name=<file_name>                :  (str)   delete by file name in downloads folder
+    --stream_hash=<stream_hash>            :  (str)   delete by file stream hash
+    --rowid=<rowid>                        :  (int)   delete by file row id
+    --claim_id=<claim_id>                  :  (str)   delete by file claim id
+    --txid=<txid>                          :  (str)   delete by file claim txid
+    --nout=<nout>                          :  (int)   delete by file claim nout
+    --claim_name=<claim_name>              :  (str)   delete by file claim name
+    --channel_claim_id=<channel_claim_id>  :  (str)   delete by file channel claim id
+    --channel_name=<channel_name>          :  (str)   delete by file channel claim name
+
+Returns:
+    (bool) true if deletion was successful
+
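Because several of the filters above can match more than one file, delete_all has to be set explicitly before multiple matches are removed. A sketch of deleting everything downloaded under one claim name, under the same endpoint assumptions as the earlier examples and with a placeholder name:

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address

deleted = requests.post(API_URL, json={
    "method": "file_delete",
    "params": {
        "claim_name": "example-claim",      # placeholder claim name
        "delete_from_download_dir": True,   # also remove the file on disk, not just the blobs
        "delete_all": True,                 # required when more than one file matches
    },
}).json().get("result")
print("deleted:", deleted)
```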
+ +

file_list

+
List files limited by optional filters
+
+Usage:
+    file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
+              [--rowid=<rowid>] [--claim_id=<claim_id>] [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
+              [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
+              [--claim_name=<claim_name>] [--full_status]
+
+
+Options:
+    --sd_hash=<sd_hash>                    :  (str)   get file with matching sd hash
+    --file_name=<file_name>                :  (str)   get file with matching file name in the
+                                                      downloads folder
+    --stream_hash=<stream_hash>            :  (str)   get file with matching stream hash
+    --rowid=<rowid>                        :  (int)   get file with matching row id
+    --claim_id=<claim_id>                  :  (str)   get file with matching claim id
+    --outpoint=<outpoint>                  :  (str)   get file with matching claim outpoint
+    --txid=<txid>                          :  (str)   get file with matching claim txid
+    --nout=<nout>                          :  (int)   get file with matching claim nout
+    --channel_claim_id=<channel_claim_id>  :  (str)   get file with matching channel claim id
+    --channel_name=<channel_name>          :  (str)   get file with matching channel name
+    --claim_name=<claim_name>              :  (str)   get file with matching claim name
+    --full_status                          :  (bool)  full status, populate the
+                                                      'message' and 'size' fields
+
+Returns:
+    (list) List of files
+
+    [
+        {
+            'completed': (bool) true if download is completed,
+            'file_name': (str) name of file,
+            'download_directory': (str) download directory,
+            'points_paid': (float) credit paid to download file,
+            'stopped': (bool) true if download is stopped,
+            'stream_hash': (str) stream hash of file,
+            'stream_name': (str) stream name,
+            'suggested_file_name': (str) suggested file name,
+            'sd_hash': (str) sd hash of file,
+            'download_path': (str) download path of file,
+            'mime_type': (str) mime type of file,
+            'key': (str) key attached to file,
+            'total_bytes': (int) file size in bytes, None if full_status is false,
+            'written_bytes': (int) written size in bytes,
+            'blobs_completed': (int) num_completed, None if full_status is false,
+            'blobs_in_stream': (int) None if full_status is false,
+            'status': (str) downloader status, None if full_status is false,
+            'claim_id': (str) None if full_status is false or if claim is not found,
+            'outpoint': (str) None if full_status is false or if claim is not found,
+            'txid': (str) None if full_status is false or if claim is not found,
+            'nout': (int) None if full_status is false or if claim is not found,
+            'metadata': (dict) None if full_status is false or if claim is not found,
+            'channel_claim_id': (str) None if full_status is false or if claim is not found or signed,
+            'channel_name': (str) None if full_status is false or if claim is not found or signed,
+            'claim_name': (str) None if full_status is false or if claim is not found
+        },
+    ]
+
+ +

file_reflect

+
Reflect all the blobs in a file matching the filter criteria
+
+Usage:
+    file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
+                 [--stream_hash=<stream_hash>] [--rowid=<rowid>]
+                 [--reflector=<reflector>]
+
+
+Options:
+    --sd_hash=<sd_hash>          :  (str)  get file with matching sd hash
+    --file_name=<file_name>      :  (str)  get file with matching file name in the
+                                           downloads folder
+    --stream_hash=<stream_hash>  :  (str)  get file with matching stream hash
+    --rowid=<rowid>              :  (int)  get file with matching row id
+    --reflector=<reflector>      :  (str)  reflector server, ip address or url
+                                           by default choose a server from the config
+
+Returns:
+    (list) list of blobs reflected
+
+ +

file_set_status

+
Start or stop downloading a file
+
+Usage:
+    file_set_status (<status> | --status=<status>) [--sd_hash=<sd_hash>]
+              [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>]
+
+
+Options:
+    --status=<status>            :  (str)  one of "start" or "stop"
+    --sd_hash=<sd_hash>          :  (str)  set status of file with matching sd hash
+    --file_name=<file_name>      :  (str)  set status of file with matching file name in the
+                                           downloads folder
+    --stream_hash=<stream_hash>  :  (str)  set status of file with matching stream hash
+    --rowid=<rowid>              :  (int)  set status of file with matching row id
+
+Returns:
+    (str) Confirmation message
+
+ +

get

+
Download stream from a LBRY name.
+
+Usage:
+    get <uri> [<file_name> | --file_name=<file_name>] [<timeout> | --timeout=<timeout>]
+
+
+
+Options:
+    --uri=<uri>              :  (str)  uri of the content to download
+    --file_name=<file_name>  :  (str)  specified name for the downloaded file
+    --timeout=<timeout>      :  (int)  download timeout in number of seconds
+
+Returns:
+    (dict) Dictionary containing information about the stream
+    {
+        'completed': (bool) true if download is completed,
+        'file_name': (str) name of file,
+        'download_directory': (str) download directory,
+        'points_paid': (float) credit paid to download file,
+        'stopped': (bool) true if download is stopped,
+        'stream_hash': (str) stream hash of file,
+        'stream_name': (str) stream name,
+        'suggested_file_name': (str) suggested file name,
+        'sd_hash': (str) sd hash of file,
+        'download_path': (str) download path of file,
+        'mime_type': (str) mime type of file,
+        'key': (str) key attached to file,
+        'total_bytes': (int) file size in bytes, None if full_status is false,
+        'written_bytes': (int) written size in bytes,
+        'blobs_completed': (int) num_completed, None if full_status is false,
+        'blobs_in_stream': (int) None if full_status is false,
+        'status': (str) downloader status, None if full_status is false,
+        'claim_id': (str) claim id,
+        'outpoint': (str) claim outpoint string,
+        'txid': (str) claim txid,
+        'nout': (int) claim nout,
+        'metadata': (dict) claim metadata,
+        'channel_claim_id': (str) None if claim is not signed
+        'channel_name': (str) None if claim is not signed
+        'claim_name': (str) claim name
+    }
+
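A minimal download call, using the uri and timeout options above and reading a few of the documented fields from the returned file object (assumed local endpoint and `result` envelope, placeholder uri):

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address

stream = requests.post(API_URL, json={
    "method": "get",
    "params": {"uri": "lbry://example-name", "timeout": 180},  # placeholder uri
}).json().get("result", {})

print("file name:", stream.get("file_name"))
print("download path:", stream.get("download_path"))
print("completed:", stream.get("completed"))
```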
+ +

help

+
Return a useful message for an API command
+
+Usage:
+    help [<command> | --command=<command>]
+
+
+Options:
+    --command=<command>  :  (str)  command to retrieve documentation for
+
+Returns:
+    (str) Help message
+
+ +

peer_list

+
Get peers for blob hash
+
+Usage:
+    peer_list (<blob_hash> | --blob_hash=<blob_hash>) [<timeout> | --timeout=<timeout>]
+
+
+Options:
+    --blob_hash=<blob_hash>  :  (str)  find available peers for this blob hash
+    --timeout=<timeout>      :  (int)  peer search timeout in seconds
+
+Returns:
+    (list) List of contacts
+
+ +

publish

+
Make a new name claim and publish associated data to lbrynet,
+updating the existing claim if the user already has a claim for the name.
+
+Fields required in the final Metadata are:
+    'title'
+    'description'
+    'author'
+    'language'
+    'license'
+    'nsfw'
+
+Metadata can be set by either using the metadata argument or by setting individual arguments
+fee, title, description, author, language, license, license_url, thumbnail, preview, nsfw,
+or sources. Individual arguments will overwrite the fields specified in metadata argument.
+
+Usage:
+    publish (<name> | --name=<name>) (<bid> | --bid=<bid>) [--metadata=<metadata>]
+            [--file_path=<file_path>] [--fee=<fee>] [--title=<title>]
+            [--description=<description>] [--author=<author>] [--language=<language>]
+            [--license=<license>] [--license_url=<license_url>] [--thumbnail=<thumbnail>]
+            [--preview=<preview>] [--nsfw=<nsfw>] [--sources=<sources>]
+            [--channel_name=<channel_name>] [--channel_id=<channel_id>]
+            [--claim_address=<claim_address>] [--change_address=<change_address>]
+
+
+Options:
+    --name=<name>                    :  (str)    name of the content
+    --bid=<bid>                      :  (float)  amount to back the claim
+    --metadata=<metadata>            :  (dict)   ClaimDict to associate with the claim.
+    --file_path=<file_path>          :  (str)    path to file to be associated with name. If provided,
+                                                 a lbry stream of this file will be used in 'sources'.
+                                                 If no path is given but a sources dict is provided,
+                                                 it will be used. If neither are provided, an
+                                                 error is raised.
+    --fee=<fee>                      :  (dict)   Dictionary representing key fee to download content:
+                                                 {
+                                                 'currency': currency_symbol,
+                                                 'amount': float,
+                                                 'address': str, optional
+                                                 }
+                                                 supported currencies: LBC, USD, BTC
+                                                 If an address is not provided a new one will be
+                                                 automatically generated. Default fee is zero.
+    --title=<title>                  :  (str)    title of the publication
+    --description=<description>      :  (str)    description of the publication
+    --author=<author>                :  (str)    author of the publication
+    --language=<language>            :  (str)    language of the publication
+    --license=<license>              :  (str)    publication license
+    --license_url=<license_url>      :  (str)    publication license url
+    --thumbnail=<thumbnail>          :  (str)    thumbnail url
+    --preview=<preview>              :  (str)    preview url
+    --nsfw=<nsfw>                    :  (bool)   whether the content is not suitable for work
+    --sources=<sources>              :  (str)    {'lbry_sd_hash': sd_hash} specifies sd hash of file
+    --channel_name=<channel_name>    :  (str)    name of the publisher channel name in the wallet
+    --channel_id=<channel_id>        :  (str)    claim id of the publisher channel, does not check
+                                                 for channel claim being in the wallet. This allows
+                                                 publishing to a channel where only the certificate
+                                                 private key is in the wallet.
+    --claim_address=<claim_address>  :  (str)    address where the claim is sent to, if not specified
+                                                 a new address will automatically be created
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        'tx' : (str) hex encoded transaction
+        'txid' : (str) txid of resulting claim
+        'nout' : (int) nout of the resulting claim
+        'fee' : (float) fee paid for the claim transaction
+        'claim_id' : (str) claim ID of the resulting claim
+    }
+
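Putting the options above together, a publish call needs the six required metadata fields plus a bid, and may attach a fee dict and a channel. The sketch below uses placeholder values throughout and the same endpoint assumptions as the earlier examples.

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address

claim = requests.post(API_URL, json={
    "method": "publish",
    "params": {
        "name": "example-name",                  # placeholder claim name
        "bid": 1.0,
        "file_path": "/path/to/content.mp4",     # placeholder file path
        "title": "Example title",
        "description": "Example description",
        "author": "Example Author",
        "language": "en",
        "license": "Public Domain",
        "nsfw": False,
        "fee": {"currency": "LBC", "amount": 0.5},  # optional key fee, address auto-generated
        "channel_name": "@example-channel",         # optional, channel must be in the wallet
    },
}).json().get("result", {})

print(claim.get("claim_id"), claim.get("txid"), claim.get("nout"))
```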
+ +

report_bug

+
Report a bug to slack
+
+Usage:
+    report_bug (<message> | --message=<message>)
+
+
+Options:
+    --message=<message>  :  (str)  Description of the bug
+
+Returns:
+    (bool) true if successful
+
+ +

resolve

+
Resolve given LBRY URIs
+
+Usage:
+    resolve [--force] (<uri> | --uri=<uri>) [<uris>...]
+
+
+Options:
+    --force        :  (bool)  force refresh and ignore cache
+    --uri=<uri>    :  (str)   uri to resolve
+    --uris=<uris>  :  (list)  uris to resolve
+
+Returns:
+    Dictionary of results, keyed by uri
+    '<uri>': {
+            If a resolution error occurs:
+            'error': Error message
+
+            If the uri resolves to a channel or a claim in a channel:
+            'certificate': {
+                'address': (str) claim address,
+                'amount': (float) claim amount,
+                'effective_amount': (float) claim amount including supports,
+                'claim_id': (str) claim id,
+                'claim_sequence': (int) claim sequence number,
+                'decoded_claim': (bool) whether or not the claim value was decoded,
+                'height': (int) claim height,
+                'depth': (int) claim depth,
+                'has_signature': (bool) included if decoded_claim
+                'name': (str) claim name,
+                'permanent_url': (str) permanent url of the certificate claim,
+                'supports': (list) list of supports [{'txid': (str) txid,
+                                                     'nout': (int) nout,
+                                                     'amount': (float) amount}],
+                'txid': (str) claim txid,
+                'nout': (str) claim nout,
+                'signature_is_valid': (bool), included if has_signature,
+                'value': ClaimDict if decoded, otherwise hex string
+            }
+
+            If the uri resolves to a channel:
+            'claims_in_channel': (int) number of claims in the channel,
+
+            If the uri resolves to a claim:
+            'claim': {
+                'address': (str) claim address,
+                'amount': (float) claim amount,
+                'effective_amount': (float) claim amount including supports,
+                'claim_id': (str) claim id,
+                'claim_sequence': (int) claim sequence number,
+                'decoded_claim': (bool) whether or not the claim value was decoded,
+                'height': (int) claim height,
+                'depth': (int) claim depth,
+                'has_signature': (bool) included if decoded_claim
+                'name': (str) claim name,
+                'permanent_url': (str) permanent url of the claim,
+                'channel_name': (str) channel name if claim is in a channel
+                'supports': (list) list of supports [{'txid': (str) txid,
+                                                     'nout': (int) nout,
+                                                     'amount': (float) amount}]
+                'txid': (str) claim txid,
+                'nout': (str) claim nout,
+                'signature_is_valid': (bool), included if has_signature,
+                'value': ClaimDict if decoded, otherwise hex string
+            }
+    }
+
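Because the per-uri result can be an error, a channel, or a plain claim, callers generally branch on the keys described above. A sketch with placeholder uris and the same assumed endpoint and `result` envelope:

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address
uris = ["lbry://example-one", "lbry://@example-channel"]  # placeholder uris

results = requests.post(API_URL, json={
    "method": "resolve",
    "params": {"uri": uris[0], "uris": uris[1:]},
}).json().get("result", {})

for uri, info in results.items():
    if "error" in info:
        print(uri, "->", info["error"])
    elif "claims_in_channel" in info:
        print(uri, "-> channel with", info["claims_in_channel"], "claims")
    else:
        claim = info.get("claim", {})
        print(uri, "->", claim.get("claim_id"), claim.get("permanent_url"))
```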
+ +

resolve_name

+
Resolve stream info from a LBRY name
+
+Usage:
+    resolve_name (<name> | --name=<name>) [--force]
+
+
+Options:
+    --name=<name>  :  (str)   the name to resolve
+    --force        :  (bool)  force refresh and do not check cache
+
+Returns:
+    (dict) Metadata dictionary from name claim, None if the name is not
+            resolvable
+
+ +

routing_table_get

+
Get DHT routing information
+
+Usage:
+    routing_table_get
+
+
+Options:
+          None
+
+Returns:
+    (dict) dictionary containing routing and contact information
+    {
+        "buckets": {
+            <bucket index>: [
+                {
+                    "address": (str) peer address,
+                    "node_id": (str) peer node id,
+                    "blobs": (list) blob hashes announced by peer
+                }
+            ]
+        },
+        "contacts": (list) contact node ids,
+        "blob_hashes": (list) all of the blob hashes stored by peers in the list of buckets,
+        "node_id": (str) the local dht node id
+    }
+
+ +

settings_get

+
Get daemon settings
+
+Usage:
+    settings_get
+
+
+Options:
+          None
+
+Returns:
+    (dict) Dictionary of daemon settings
+    See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings
+
+ +

settings_set

+
Set daemon settings
+
+Usage:
+    settings_set [--download_directory=<download_directory>]
+                 [--data_rate=<data_rate>]
+                 [--download_timeout=<download_timeout>]
+                 [--peer_port=<peer_port>]
+                 [--max_key_fee=<max_key_fee>]
+                 [--disable_max_key_fee=<disable_max_key_fee>]
+                 [--use_upnp=<use_upnp>]
+                 [--run_reflector_server=<run_reflector_server>]
+                 [--cache_time=<cache_time>]
+                 [--reflect_uploads=<reflect_uploads>]
+                 [--share_usage_data=<share_usage_data>]
+                 [--peer_search_timeout=<peer_search_timeout>]
+                 [--sd_download_timeout=<sd_download_timeout>]
+                 [--auto_renew_claim_height_delta=<auto_renew_claim_height_delta>]
+
+
+Options:
+    --download_directory=<download_directory>                        :  (str)    path of download directory
+    --data_rate=<data_rate>                                          :  (float)  0.0001
+    --download_timeout=<download_timeout>                            :  (int)    180
+    --peer_port=<peer_port>                                          :  (int)    3333
+    --max_key_fee=<max_key_fee>                                      :  (dict)   maximum key fee for downloads,
+                                                                                 in the format:
+                                                                                 {
+                                                                                 'currency': <currency_symbol>,
+                                                                                 'amount': <amount>
+                                                                                 }.
+                                                                                 In the CLI, it must be an escaped JSON string
+                                                                                 Supported currency symbols: LBC, USD, BTC
+    --disable_max_key_fee=<disable_max_key_fee>                      :  (bool)   False
+    --use_upnp=<use_upnp>                                            :  (bool)   True
+    --run_reflector_server=<run_reflector_server>                    :  (bool)   False
+    --cache_time=<cache_time>                                        :  (int)    150
+    --reflect_uploads=<reflect_uploads>                              :  (bool)   True
+    --share_usage_data=<share_usage_data>                            :  (bool)   True
+    --peer_search_timeout=<peer_search_timeout>                      :  (int)    3
+    --sd_download_timeout=<sd_download_timeout>                      :  (int)    3
+    --auto_renew_claim_height_delta=<auto_renew_claim_height_delta>  :  (int)    0
+                                                                                 claims set to expire within this many blocks will be
+                                                                                 automatically renewed after startup (if set to 0, renews
+                                                                                 will not be made automatically)
+
+Returns:
+    (dict) Updated dictionary of daemon settings
+
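Note that only the CLI needs max_key_fee as an escaped JSON string; over the API it can be sent as a plain object. A sketch of changing a few settings at once, with the usual assumed endpoint and a placeholder download directory:

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address

updated = requests.post(API_URL, json={
    "method": "settings_set",
    "params": {
        "download_directory": "/home/user/Downloads",        # placeholder path
        "max_key_fee": {"currency": "USD", "amount": 50.0},  # plain dict, no escaping needed here
        "share_usage_data": False,
    },
}).json().get("result", {})

print(updated.get("download_directory"), updated.get("max_key_fee"))
```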
+ +

status

+
Get daemon status
+
+Usage:
+    status [--session_status] [--dht_status]
+
+
+Options:
+    --session_status  :  (bool)  include session status in results
+    --dht_status      :  (bool)  include dht network and peer status
+
+Returns:
+    (dict) lbrynet-daemon status
+    {
+        'lbry_id': lbry peer id, base58,
+        'installation_id': installation id, base58,
+        'is_running': bool,
+        'is_first_run': bool,
+        'startup_status': {
+            'code': status code,
+            'message': status message
+        },
+        'connection_status': {
+            'code': connection status code,
+            'message': connection status message
+        },
+        'blockchain_status': {
+            'blocks': local blockchain height,
+            'blocks_behind': remote_height - local_height,
+            'best_blockhash': block hash of most recent block,
+        },
+        'wallet_is_encrypted': bool,
+
+        If given the session status option:
+            'session_status': {
+                'managed_blobs': count of blobs in the blob manager,
+                'managed_streams': count of streams in the file manager
+                'announce_queue_size': number of blobs currently queued to be announced
+                'should_announce_blobs': number of blobs that should be announced
+            }
+
+        If given the dht status option:
+            'dht_status': {
+                'kbps_received': current kbps receiving,
+                'kbps_sent': current kbps being sent,
+                'total_bytes_sent': total bytes sent,
+                'total_bytes_received': total bytes received,
+                'queries_received': number of queries received per second,
+                'queries_sent': number of queries sent per second,
+                'recent_contacts': count of recently contacted peers,
+                'unique_contacts': count of unique peers
+            },
+    }
+
+ +

stream_availability

+
Get stream availability for lbry uri
+
+Usage:
+    stream_availability (<uri> | --uri=<uri>)
+                        [<search_timeout> | --search_timeout=<search_timeout>]
+                        [<blob_timeout> | --blob_timeout=<blob_timeout>]
+
+
+Options:
+    --uri=<uri>                        :  (str)  check availability for this uri
+    --search_timeout=<search_timeout>  :  (int)  how long to search for peers for the blob
+                                                 in the dht
+    --blob_timeout=<blob_timeout>      :  (int)  how long to try downloading from a peer
+
+Returns:
+    (dict) {
+        'is_available': <bool>,
+        'did_decode': <bool>,
+        'did_resolve': <bool>,
+        'is_stream': <bool>,
+        'num_blobs_in_stream': <int>,
+        'sd_hash': <str>,
+        'sd_blob_availability': <dict> see `blob_availability`,
+        'head_blob_hash': <str>,
+        'head_blob_availability': <dict> see `blob_availability`,
+        'use_upnp': <bool>,
+        'upnp_redirect_is_set': <bool>,
+        'error': <None> | <str> error message
+    }
+
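This command works well as a pre-flight check before calling get. A sketch that probes a uri and reports the documented fields (placeholder uri, same endpoint assumptions as the earlier examples):

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address
uri = "lbry://example-name"        # placeholder uri

availability = requests.post(API_URL, json={
    "method": "stream_availability",
    "params": {"uri": uri, "search_timeout": 10, "blob_timeout": 10},
}).json().get("result", {})

if availability.get("error"):
    print(uri, "error:", availability["error"])
elif availability.get("is_available"):
    print(uri, "is available,", availability.get("num_blobs_in_stream"), "blobs in stream")
else:
    print(uri, "is not available from reachable peers right now")
```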
+ +

stream_cost_estimate

+
Get estimated cost for a lbry stream
+
+Usage:
+    stream_cost_estimate (<uri> | --uri=<uri>) [<size> | --size=<size>]
+
+
+Options:
+    --uri=<uri>    :  (str)    uri to use
+    --size=<size>  :  (float)  stream size in bytes. if provided an sd blob won't be
+                               downloaded.
+
+Returns:
+    (float) Estimated cost in lbry credits, returns None if uri is not
+        resolvable
+
+ +

transaction_list

+
List transactions belonging to wallet
+
+Usage:
+    transaction_list
+
+
+Options:
+          None
+
+Returns:
+    (list) List of transactions
+
+    {
+        "claim_info": (list) claim info if in txn [{
+                                                "address": (str) address of claim,
+                                                "balance_delta": (float) bid amount,
+                                                "amount": (float) claim amount,
+                                                "claim_id": (str) claim id,
+                                                "claim_name": (str) claim name,
+                                                "nout": (int) nout
+                                                }],
+        "abandon_info": (list) abandon info if in txn [{
+                                                "address": (str) address of abandoned claim,
+                                                "balance_delta": (float) returned amount,
+                                                "amount": (float) claim amount,
+                                                "claim_id": (str) claim id,
+                                                "claim_name": (str) claim name,
+                                                "nout": (int) nout
+                                                }],
+        "confirmations": (int) number of confirmations for the txn,
+        "date": (str) date and time of txn,
+        "fee": (float) txn fee,
+        "support_info": (list) support info if in txn [{
+                                                "address": (str) address of support,
+                                                "balance_delta": (float) support amount,
+                                                "amount": (float) support amount,
+                                                "claim_id": (str) claim id,
+                                                "claim_name": (str) claim name,
+                                                "is_tip": (bool),
+                                                "nout": (int) nout
+                                                }],
+        "timestamp": (int) timestamp,
+        "txid": (str) txn id,
+        "update_info": (list) update info if in txn [{
+                                                "address": (str) address of claim,
+                                                "balance_delta": (float) credited/debited
+                                                "amount": (float) absolute amount,
+                                                "claim_id": (str) claim id,
+                                                "claim_name": (str) claim name,
+                                                "nout": (int) nout
+                                                }],
+        "value": (float) value of txn
+    }
+
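The claim_info, support_info, and abandon_info lists attached to each transaction make it straightforward to pick out specific activity, for example incoming tips. A sketch under the usual endpoint assumptions:

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address

transactions = requests.post(API_URL, json={
    "method": "transaction_list",
    "params": {},
}).json().get("result") or []

for txn in transactions:
    for support in txn.get("support_info", []):
        if support.get("is_tip"):
            print(txn["txid"], support["claim_name"], support["amount"])
```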
+ +

transaction_show

+
Get a decoded transaction from a txid
+
+Usage:
+    transaction_show (<txid> | --txid=<txid>)
+
+
+Options:
+    --txid=<txid>  :  (str)  txid of the transaction
+
+Returns:
+    (dict) JSON formatted transaction
+
+ +

utxo_list

+
List unspent transaction outputs
+
+Usage:
+    utxo_list
+
+
+Options:
+          None
+
+Returns:
+    (list) List of unspent transaction outputs (UTXOs)
+    [
+        {
+            "address": (str) the output address
+            "amount": (float) unspent amount
+            "height": (int) block height
+            "is_claim": (bool) is the tx a claim
+            "is_coinbase": (bool) is the tx a coinbase tx
+            "is_support": (bool) is the tx a support
+            "is_update": (bool) is the tx an update
+            "nout": (int) nout of the output
+            "txid": (str) txid of the output
+        },
+        ...
+    ]
+
+ +

version

+
Get lbry version information
+
+Usage:
+    version
+
+
+Options:
+          None
+
+Returns:
+    (dict) Dictionary of lbry version information
+    {
+        'build': (str) build type (e.g. "dev", "rc", "release"),
+        'ip': (str) remote ip, if available,
+        'lbrynet_version': (str) lbrynet_version,
+        'lbryum_version': (str) lbryum_version,
+        'lbryschema_version': (str) lbryschema_version,
+        'os_release': (str) os release string
+        'os_system': (str) os name
+        'platform': (str) platform string
+        'processor': (str) processor type,
+        'python_version': (str) python version,
+    }
+
+ +

wallet_balance

+
Return the balance of the wallet
+
+Usage:
+    wallet_balance [<address> | --address=<address>] [--include_unconfirmed]
+
+
+Options:
+    --address=<address>    :  (str)   If provided only the balance for this
+                                      address will be given
+    --include_unconfirmed  :  (bool)  Include unconfirmed
+
+Returns:
+    (float) amount of lbry credits in wallet
+
+ +

wallet_decrypt

+
Decrypt an encrypted wallet; this will remove the wallet password
+
+Usage:
+    wallet_decrypt
+
+
+Options:
+          None
+
+Returns:
+    (bool) true if wallet is decrypted, otherwise false
+
+ +

wallet_encrypt

+
Encrypt a wallet with a password; if the wallet is already encrypted this will update
+the password
+
+Usage:
+    wallet_encrypt (<new_password> | --new_password=<new_password>)
+
+
+Options:
+    --new_password=<new_password>  :  (str)  password string to be used for encrypting wallet
+
+Returns:
+    (bool) true if wallet is encrypted, otherwise false
+
+ +

wallet_is_address_mine

+
Checks if an address is associated with the current wallet.
+
+Usage:
+    wallet_is_address_mine (<address> | --address=<address>)
+
+
+Options:
+    --address=<address>  :  (str)  address to check
+
+Returns:
+    (bool) true, if address is associated with current wallet
+
+ +

wallet_list

+
List wallet addresses
+
+Usage:
+    wallet_list
+
+
+Options:
+          None
+
+Returns:
+    List of wallet addresses
+
+ +

wallet_new_address

+
Generate a new wallet address
+
+Usage:
+    wallet_new_address
+
+
+Options:
+          None
+
+Returns:
+    (str) New wallet address in base58
+
+ +

wallet_prefill_addresses

+
Create new addresses, each containing `amount` credits
+
+Usage:
+    wallet_prefill_addresses [--no_broadcast]
+                             (<num_addresses> | --num_addresses=<num_addresses>)
+                             (<amount> | --amount=<amount>)
+
+
+Options:
+    --no_broadcast                   :  (bool)   do not broadcast the transaction
+    --num_addresses=<num_addresses>  :  (int)    num of addresses to create
+    --amount=<amount>                :  (float)  initial amount in each address
+
+Returns:
+    (dict) the resulting transaction
+
+ +

wallet_public_key

+
Get public key from wallet address
+
+Usage:
+    wallet_public_key (<address> | --address=<address>)
+
+
+Options:
+    --address=<address>  :  (str)  address for which to get the public key
+
+Returns:
+    (list) list of public keys associated with address.
+        Could contain more than one public key if multisig.
+
+ +

wallet_send

+
Send credits. If given an address, send credits to it. If given a claim id, send a tip
+to the owner of the claim. A tip is a claim support where the recipient
+of the support is the claim address for the claim being supported.
+
+Usage:
+    wallet_send (<amount> | --amount=<amount>)
+                ((<address> | --address=<address>) | (<claim_id> | --claim_id=<claim_id>))
+
+
+Options:
+    --amount=<amount>      :  (float)  amount of credit to send
+    --address=<address>    :  (str)    address to send credits to
+    --claim_id=<claim_id>  :  (str)    claim_id of the claim to send the tip to
+
+Returns:
+    If sending to an address:
+    (bool) true if payment successfully scheduled
+
+    If sending a claim tip:
+    (dict) Dictionary containing the result of the support
+    {
+        txid : (str) txid of resulting support claim
+        nout : (int) nout of the resulting support claim
+        fee : (float) fee paid for the transaction
+    }
+
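Since the return shape depends on whether an address or a claim_id is supplied, a small wrapper can make the two modes explicit. A sketch with placeholder address and claim id values and the usual assumed endpoint:

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address

def wallet_send(amount, address=None, claim_id=None):
    """Send credits to an address, or tip the claim when claim_id is given instead."""
    params = {"amount": amount}
    if address is not None:
        params["address"] = address
    else:
        params["claim_id"] = claim_id
    return requests.post(API_URL, json={"method": "wallet_send", "params": params}).json().get("result")

print(wallet_send(1.0, address="bExamplePlaceholderAddress"))                  # bool: payment scheduled
print(wallet_send(0.5, claim_id="0123456789abcdef0123456789abcdef01234567"))  # dict: txid/nout/fee of the support
```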
+ +

wallet_unlock

+
Unlock an encrypted wallet
+
+Usage:
+    wallet_unlock (<password> | --password=<password>)
+
+
+Options:
+    --password=<password>  :  (str)  password for unlocking wallet
+
+Returns:
+    (bool) true if wallet is unlocked, otherwise false
+
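Together with wallet_encrypt above, this enables an encrypt-then-unlock flow. A sketch under the usual endpoint assumptions, prompting for the password only for illustration:

```python
import requests
from getpass import getpass

API_URL = "http://localhost:5279"  # assumed local daemon address

def call(method, **params):
    return requests.post(API_URL, json={"method": method, "params": params}).json().get("result")

password = getpass("wallet password: ")

# Encrypt the wallet (or update the password if it is already encrypted)...
print("encrypted:", call("wallet_encrypt", new_password=password))
# ...then unlock it, e.g. while the daemon is waiting in its wallet_unlock startup stage.
print("unlocked:", call("wallet_unlock", password=password))
```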
+ +

wallet_unused_address

+
Return an address containing no balance, creating
+a new address if there is none.
+
+Usage:
+    wallet_unused_address
+
+
+Options:
+          None
+
+Returns:
+    (str) Unused wallet address in base58
+
+```
\ No newline at end of file
diff --git a/docs/index.html b/docs/index.html
new file mode 100644
index 000000000..8c3eac1cd
--- /dev/null
+++ b/docs/index.html
@@ -0,0 +1,2262 @@

LBRY JSON-RPC API Documentation

+

blob_announce

+
Announce blobs to the DHT
+
+Args:
+    'announce_all' (optional)  :  (bool)  announce all the blobs possessed by user
+    'blob_hash' (optional)     :  (str)   announce a blob, specified by blob_hash
+    'stream_hash' (optional)   :  (str)   announce all blobs associated with
+                                          stream_hash
+    'sd_hash' (optional)       :  (str)   announce all blobs associated with
+                                          sd_hash and the sd_hash itself
+
+Returns:
+    (bool) true if successful
+
+ +

blob_availability

+
Get blob availability
+
+Args:
+    'blob_hash' (optional)       :  (str)  check availability for this blob hash
+    'search_timeout' (optional)  :  (int)  how long to search for peers for the blob
+                                           in the dht
+    'blob_timeout' (optional)    :  (int)  how long to try downloading from a peer
+
+Returns:
+    (dict) {
+        "is_available": <bool, true if blob is available from a peer from peer list>
+        "reachable_peers": ["<ip>:<port>"],
+        "unreachable_peers": ["<ip>:<port>"]
+    }
+
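As with the CLI half of this patch, the Args listed above map onto a request body one-to-one. A quick availability probe might look like the sketch below (assumed local endpoint and `result` envelope, placeholder blob hash):

```python
import requests

API_URL = "http://localhost:5279"  # assumed local daemon address

availability = requests.post(API_URL, json={
    "method": "blob_availability",
    "params": {"blob_hash": "<blob hash>", "search_timeout": 10, "blob_timeout": 10},  # placeholder hash
}).json().get("result", {})

print("available:", availability.get("is_available"))
print("reachable peers:", availability.get("reachable_peers"))
```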
+ +

blob_delete

+
Delete a blob
+
+Args:
+    'blob_hash' (optional)  :  (str)  blob hash of the blob to delete
+
+Returns:
+    (str) Success/fail message
+
+ +

blob_get

+
Download and return a blob
+
+Args:
+    'blob_hash'                        :  (str)  blob hash of the blob to get
+    'timeout' (optional)               :  (int)  timeout in number of seconds
+    'encoding' (optional)              :  (str)  by default no attempt at decoding
+                                                 is made, can be set to one of the
+                                                 following decoders:
+                                                 'json'
+    'payment_rate_manager' (optional)  :  (str)  if not given the default payment rate
+                                                 manager will be used.
+                                                 supported alternative rate managers:
+                                                 'only-free'
+
+Returns:
+    (str) Success/Fail message or (dict) decoded data
+
+ +

blob_list

+
Returns blob hashes. If not given filters, returns all blobs known by the blob manager
+
+Args:
+    'needed' (optional)       :  (bool)  only return needed blobs
+    'finished' (optional)     :  (bool)  only return finished blobs
+    'uri' (optional)          :  (str)   filter blobs by stream in a uri
+    'stream_hash' (optional)  :  (str)   filter blobs by stream hash
+    'sd_hash' (optional)      :  (str)   filter blobs by sd hash
+    'page_size' (optional)    :  (int)   results page size
+    'page' (optional)         :  (int)   page of results to return
+
+Returns:
+    (list) List of blob hashes
+
+ +

blob_reflect_all

+
Reflects all saved blobs
+
+Args:
+          None
+
+Returns:
+    (bool) true if successful
+
+ +

block_show

+
Get contents of a block
+
+Args:
+    'blockhash'  :  (str)  hash of the block to look up
+    'height'     :  (int)  height of the block to look up
+
+Returns:
+    (dict) Requested block
+
+ +

channel_export

+
Export serialized channel signing information for a given certificate claim id
+
+Args:
+    'claim_id'  :  (str)  Claim ID to export information about
+
+Returns:
+    (str) Serialized certificate information
+
+ +

channel_import

+
Import serialized channel signing information (to allow signing new claims to the channel)
+
+Args:
+    'serialized_certificate_info' (optional)  :  (str)  certificate info
+
+Returns:
+    (dict) Result dictionary
+
+ +

channel_list

+
Get certificate claim infos for channels that can be published to
+
+Args:
+          None
+
+Returns:
+    (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim
+    is in the wallet.
+
+ +

channel_new

+
Generate a publisher key and create a new '@' prefixed certificate claim
+
+Args:
+    'channel_name'  :  (str)    name of the channel prefixed with '@'
+    'amount'        :  (float)  bid amount on the channel
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        'tx' : (str) hex encoded transaction
+        'txid' : (str) txid of resulting claim
+        'nout' : (int) nout of the resulting claim
+        'fee' : (float) fee paid for the claim transaction
+        'claim_id' : (str) claim ID of the resulting claim
+    }
+
+ +

claim_abandon

+
Abandon a name and reclaim credits from the claim
+
+Args:
+    'claim_id' (optional)  :  (str)  claim_id of the claim to abandon
+    'txid' (optional)      :  (str)  txid of the claim to abandon
+    'nout' (optional)      :  (int)  nout of the claim to abandon
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        txid : (str) txid of resulting transaction
+        fee : (float) fee paid for the transaction
+    }
+
+ +

claim_list

+
List current claims and information about them for a given name
+
+Args:
+    'name'  :  (str)  name of the claim to list info about
+
+Returns:
+    (dict) State of claims assigned for the name
+    {
+        'claims': (list) list of claims for the name
+        [
+            {
+            'amount': (float) amount assigned to the claim
+            'effective_amount': (float) total amount assigned to the claim,
+                                including supports
+            'claim_id': (str) claim ID of the claim
+            'height': (int) height of block containing the claim
+            'txid': (str) txid of the claim
+            'nout': (int) nout of the claim
+            'permanent_url': (str) permanent url of the claim,
+            'supports': (list) a list of supports attached to the claim
+            'value': (str) the value of the claim
+            },
+        ]
+        'supports_without_claims': (list) supports without any claims attached to them
+        'last_takeover_height': (int) the height of last takeover for the name
+    }
+
+ +

claim_list_by_channel

+
Get paginated claims in a channel specified by a channel uri
+
+Args:
+    'uri'                   :  (str)   uri of the channel
+    'uris' (optional)       :  (list)  uris of the channel
+    'page' (optional)       :  (int)   which page of results to return where page 1 is the first
+                                       page, defaults to no pages
+    'page_size' (optional)  :  (int)   number of results in a page, default of 10
+
+Returns:
+    {
+         resolved channel uri: {
+            If there was an error:
+            'error': (str) error message
+
+            'claims_in_channel': the total number of results for the channel,
+
+            If a page of results was requested:
+            'returned_page': page number returned,
+            'claims_in_channel': [
+                {
+                    'absolute_channel_position': (int) claim index number in sorted list of
+                                                 claims which assert to be part of the
+                                                 channel
+                    'address': (str) claim address,
+                    'amount': (float) claim amount,
+                    'effective_amount': (float) claim amount including supports,
+                    'claim_id': (str) claim id,
+                    'claim_sequence': (int) claim sequence number,
+                    'decoded_claim': (bool) whether or not the claim value was decoded,
+                    'height': (int) claim height,
+                    'depth': (int) claim depth,
+                    'has_signature': (bool) included if decoded_claim
+                    'name': (str) claim name,
+                    'supports': (list) list of supports [{'txid': (str) txid,
+                                                         'nout': (int) nout,
+                                                         'amount': (float) amount}],
+                    'txid': (str) claim txid,
+                    'nout': (str) claim nout,
+                    'signature_is_valid': (bool), included if has_signature,
+                    'value': ClaimDict if decoded, otherwise hex string
+                }
+            ],
+        }
+    }
+
+ +

claim_list_mine

+
List my name claims
+
+Args:
+          None
+
+Returns:
+    (list) List of name claims owned by user
+    [
+        {
+            'address': (str) address that owns the claim
+            'amount': (float) amount assigned to the claim
+            'blocks_to_expiration': (int) number of blocks until it expires
+            'category': (str) "claim", "update" , or "support"
+            'claim_id': (str) claim ID of the claim
+            'confirmations': (int) number of blocks of confirmations for the claim
+            'expiration_height': (int) the block height which the claim will expire
+            'expired': (bool) true if expired, false otherwise
+            'height': (int) height of the block containing the claim
+            'is_spent': (bool) true if claim is abandoned, false otherwise
+            'name': (str) name of the claim
+            'permanent_url': (str) permanent url of the claim,
+            'txid': (str) txid of the claim
+            'nout': (int) nout of the claim
+            'value': (str) value of the claim
+        },
+   ]
+
+ +

claim_new_support

+
Support a name claim
+
+Args:
+    'name'      :  (str)    name of the claim to support
+    'claim_id'  :  (str)    claim_id of the claim to support
+    'amount'    :  (float)  amount of support
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        txid : (str) txid of resulting support claim
+        nout : (int) nout of the resulting support claim
+        fee : (float) fee paid for the transaction
+    }
+
+ +

claim_renew

+
Renew claim(s) or support(s)
+
+Args:
+    'outpoint'  :  (str)  outpoint of the claim to renew
+    'height'    :  (int)  update claims expiring before or at this block height
+
+Returns:
+    (dict) Dictionary where the key is the original claim's outpoint and
+    value is the result of the renewal
+    {
+        outpoint:{
+
+            'tx' : (str) hex encoded transaction
+            'txid' : (str) txid of resulting claim
+            'nout' : (int) nout of the resulting claim
+            'fee' : (float) fee paid for the claim transaction
+            'claim_id' : (str) claim ID of the resulting claim
+        },
+    }
+
+ +

claim_send_to_address

+
Send a name claim to an address
+
+Args:
+    'claim_id'           :  (str)  claim_id to send
+    'address'            :  (str)  address to send the claim to
+    'amount' (optional)  :  (int)  Amount of credits to claim name for, defaults to the current amount
+                                   on the claim
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        'tx' : (str) hex encoded transaction
+        'txid' : (str) txid of resulting claim
+        'nout' : (int) nout of the resulting claim
+        'fee' : (float) fee paid for the claim transaction
+        'claim_id' : (str) claim ID of the resulting claim
+    }
+
+ +

claim_show

+
Resolve claim info from txid/nout or with claim ID
+
+Args:
+    'txid' (optional)      :  (str)  look for claim with this txid, nout must
+                                     also be specified
+    'nout' (optional)      :  (int)  look for claim with this nout, txid must
+                                     also be specified
+    'claim_id' (optional)  :  (str)  look for claim with this claim id
+
+Returns:
+    (dict) Dictionary containing claim info as below,
+
+    {
+        'txid': (str) txid of claim
+        'nout': (int) nout of claim
+        'amount': (float) amount of claim
+        'value': (str) value of claim
+        'height' : (int) height of claim takeover
+        'claim_id': (str) claim ID of claim
+        'supports': (list) list of supports associated with claim
+    }
+
+    if claim cannot be resolved, dictionary as below will be returned
+
+    {
+        'error': (str) reason for error
+    }
+
+ +

cli_test_command

+
This command is only for testing the CLI argument parsing
+
+Args:
+    'a_arg' (optional)     :  a    arg
+    'b_arg' (optional)     :  b    arg
+    'pos_arg'              :  pos  arg
+    'pos_args' (optional)  :  pos  args
+    'pos_arg2' (optional)  :  pos  arg 2
+    'pos_arg3' (optional)  :  pos  arg 3
+
+Returns:
+    pos args
+
+ +

commands

+
Return a list of available commands
+
+Args:
+          None
+
+Returns:
+    (list) list of available commands
+
+ +

daemon_stop

+
Stop lbrynet-daemon
+
+Args:
+          None
+
+Returns:
+    (string) Shutdown message
+
+ +

file_delete

+
Delete a LBRY file
+
+Args:
+    'delete_from_download_dir' (optional)  :  (bool)  delete file from download directory,
+                                                      instead of just deleting blobs
+    'delete_all' (optional)                :  (bool)  if there are multiple matching files,
+                                                      allow the deletion of multiple files.
+                                                      Otherwise do not delete anything.
+    'sd_hash' (optional)                   :  (str)   delete by file sd hash
+    'file_name' (optional)                 :  (str)   delete by file name in downloads folder
+    'stream_hash' (optional)               :  (str)   delete by file stream hash
+    'rowid' (optional)                     :  (int)   delete by file row id
+    'claim_id' (optional)                  :  (str)   delete by file claim id
+    'txid' (optional)                      :  (str)   delete by file claim txid
+    'nout' (optional)                      :  (int)   delete by file claim nout
+    'claim_name' (optional)                :  (str)   delete by file claim name
+    'channel_claim_id' (optional)          :  (str)   delete by file channel claim id
+    'channel_name' (optional)              :  (str)   delete by file channel claim name
+
+Returns:
+    (bool) true if deletion was successful
+
+ +

file_list

+
List files limited by optional filters
+
+Args:
+    'sd_hash' (optional)           :  (str)   get file with matching sd hash
+    'file_name' (optional)         :  (str)   get file with matching file name in the
+                                              downloads folder
+    'stream_hash' (optional)       :  (str)   get file with matching stream hash
+    'rowid' (optional)             :  (int)   get file with matching row id
+    'claim_id' (optional)          :  (str)   get file with matching claim id
+    'outpoint' (optional)          :  (str)   get file with matching claim outpoint
+    'txid' (optional)              :  (str)   get file with matching claim txid
+    'nout' (optional)              :  (int)   get file with matching claim nout
+    'channel_claim_id' (optional)  :  (str)   get file with matching channel claim id
+    'channel_name' (optional)      :  (str)   get file with matching channel name
+    'claim_name' (optional)        :  (str)   get file with matching claim name
+    'full_status' (optional)       :  (bool)  full status, populate the
+                                              'message' and 'size' fields
+
+Returns:
+    (list) List of files
+
+    [
+        {
+            'completed': (bool) true if download is completed,
+            'file_name': (str) name of file,
+            'download_directory': (str) download directory,
+            'points_paid': (float) credit paid to download file,
+            'stopped': (bool) true if download is stopped,
+            'stream_hash': (str) stream hash of file,
+            'stream_name': (str) stream name,
+            'suggested_file_name': (str) suggested file name,
+            'sd_hash': (str) sd hash of file,
+            'download_path': (str) download path of file,
+            'mime_type': (str) mime type of file,
+            'key': (str) key attached to file,
+            'total_bytes': (int) file size in bytes, None if full_status is false,
+            'written_bytes': (int) written size in bytes,
+            'blobs_completed': (int) num_completed, None if full_status is false,
+            'blobs_in_stream': (int) None if full_status is false,
+            'status': (str) downloader status, None if full_status is false,
+            'claim_id': (str) None if full_status is false or if claim is not found,
+            'outpoint': (str) None if full_status is false or if claim is not found,
+            'txid': (str) None if full_status is false or if claim is not found,
+            'nout': (int) None if full_status is false or if claim is not found,
+            'metadata': (dict) None if full_status is false or if claim is not found,
+            'channel_claim_id': (str) None if full_status is false or if claim is not found or signed,
+            'channel_name': (str) None if full_status is false or if claim is not found or signed,
+            'claim_name': (str) None if full_status is false or if claim is not found
+        },
+    ]
+
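+Example (sketch): filtering `file_list` by claim name with `full_status` enabled, assuming a
+default local daemon at `http://localhost:5279`; the claim name is a placeholder.
+
+```python
+# Sketch: list files for one claim name and show blob download progress.
+# Endpoint URL and "result" envelope are assumptions; "some-claim-name" is a placeholder.
+import requests
+
+resp = requests.post("http://localhost:5279", json={
+    "method": "file_list",
+    "params": {"claim_name": "some-claim-name", "full_status": True},
+})
+for f in resp.json()["result"]:
+    print(f["file_name"], f["blobs_completed"], "of", f["blobs_in_stream"], "blobs")
+```
+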
+ +

file_reflect

+
Reflect all the blobs in a file matching the filter criteria
+
+Args:
+    'sd_hash' (optional)      :  (str)  get file with matching sd hash
+    'file_name' (optional)    :  (str)  get file with matching file name in the
+                                        downloads folder
+    'stream_hash' (optional)  :  (str)  get file with matching stream hash
+    'rowid' (optional)        :  (int)  get file with matching row id
+    'reflector' (optional)    :  (str)  reflector server, ip address or url
+                                        by default choose a server from the config
+
+Returns:
+    (list) list of blobs reflected
+
+ +

file_set_status

+
Start or stop downloading a file
+
+Args:
+    'status'                  :  (str)  one of "start" or "stop"
+    'sd_hash' (optional)      :  (str)  set status of file with matching sd hash
+    'file_name' (optional)    :  (str)  set status of file with matching file name in the
+                                        downloads folder
+    'stream_hash' (optional)  :  (str)  set status of file with matching stream hash
+    'rowid' (optional)        :  (int)  set status of file with matching row id
+
+Returns:
+    (str) Confirmation message
+
+ +

get

+
Download stream from a LBRY name.
+
+Args:
+    'uri' (optional)        :  (str)  uri of the content to download
+    'file_name' (optional)  :  (str)  specified name for the downloaded file
+    'timeout' (optional)    :  (int)  download timeout in number of seconds
+
+Returns:
+    (dict) Dictionary containing information about the stream
+    {
+        'completed': (bool) true if download is completed,
+        'file_name': (str) name of file,
+        'download_directory': (str) download directory,
+        'points_paid': (float) credit paid to download file,
+        'stopped': (bool) true if download is stopped,
+        'stream_hash': (str) stream hash of file,
+        'stream_name': (str) stream name,
+        'suggested_file_name': (str) suggested file name,
+        'sd_hash': (str) sd hash of file,
+        'download_path': (str) download path of file,
+        'mime_type': (str) mime type of file,
+        'key': (str) key attached to file,
+        'total_bytes': (int) file size in bytes, None if full_status is false,
+        'written_bytes': (int) written size in bytes,
+        'blobs_completed': (int) num_completed, None if full_status is false,
+        'blobs_in_stream': (int) None if full_status is false,
+        'status': (str) downloader status, None if full_status is false,
+        'claim_id': (str) claim id,
+        'outpoint': (str) claim outpoint string,
+        'txid': (str) claim txid,
+        'nout': (int) claim nout,
+        'metadata': (dict) claim metadata,
+        'channel_claim_id': (str) None if claim is not signed
+        'channel_name': (str) None if claim is not signed
+        'claim_name': (str) claim name
+    }
+
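+Example (sketch): downloading a stream with `get`, assuming a default local daemon at
+`http://localhost:5279`; the uri and timeout are placeholders.
+
+```python
+# Sketch: download a stream by uri and report where it was written.
+import requests
+
+resp = requests.post("http://localhost:5279", json={
+    "method": "get",
+    "params": {"uri": "lbry://some-content", "timeout": 180},
+})
+stream = resp.json()["result"]
+print(stream["download_path"], "completed:", stream["completed"])
+```
+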
+ +

help

+
Return a useful message for an API command
+
+Args:
+    'command' (optional)  :  (str)  command to retrieve documentation for
+
+Returns:
+    (str) Help message
+
+ +

peer_list

+
Get peers for blob hash
+
+Args:
+    'blob_hash'           :  (str)  find available peers for this blob hash
+    'timeout' (optional)  :  (int)  peer search timeout in seconds
+
+Returns:
+    (list) List of contacts
+
+ +

publish

+
+Make a new name claim and publish associated data to lbrynet;
+update over the existing claim if the user already has a claim for the name.
+
+Fields required in the final Metadata are:
+    'title'
+    'description'
+    'author'
+    'language'
+    'license'
+    'nsfw'
+
+Metadata can be set either by using the metadata argument or by setting the individual arguments
+fee, title, description, author, language, license, license_url, thumbnail, preview, nsfw,
+or sources. Individual arguments will overwrite the fields specified in the metadata argument.
+
+Args:
+    'name'                      :  (str)    name of the content
+    'bid'                       :  (float)  amount to back the claim
+    'metadata' (optional)       :  (dict)   ClaimDict to associate with the claim.
+    'file_path' (optional)      :  (str)    path to file to be associated with name. If provided,
+                                            a lbry stream of this file will be used in 'sources'.
+                                            If no path is given but a sources dict is provided,
+                                            it will be used. If neither are provided, an
+                                            error is raised.
+    'fee' (optional)            :  (dict)   Dictionary representing key fee to download content:
+                                            {
+                                            'currency': currency_symbol,
+                                            'amount': float,
+                                            'address': str, optional
+                                            }
+                                            supported currencies: LBC, USD, BTC
+                                            If an address is not provided a new one will be
+                                            automatically generated. Default fee is zero.
+    'title' (optional)          :  (str)    title of the publication
+    'description' (optional)    :  (str)    description of the publication
+    'author' (optional)         :  (str)    author of the publication
+    'language' (optional)       :  (str)    language of the publication
+    'license' (optional)        :  (str)    publication license
+    'license_url' (optional)    :  (str)    publication license url
+    'thumbnail' (optional)      :  (str)    thumbnail url
+    'preview' (optional)        :  (str)    preview url
+    'nsfw' (optional)           :  (bool)   whether the content is not safe for work
+    'sources' (optional)        :  (str)    {'lbry_sd_hash': sd_hash} specifies sd hash of file
+    'channel_name' (optional)   :  (str)    name of the publisher channel in the wallet
+    'channel_id' (optional)     :  (str)    claim id of the publisher channel, does not check
+                                            for channel claim being in the wallet. This allows
+                                            publishing to a channel where only the certificate
+                                            private key is in the wallet.
+    'claim_address' (optional)  :  (str)    address where the claim is sent to, if not specified
+                                            a new address will automatically be created
+
+Returns:
+    (dict) Dictionary containing result of the claim
+    {
+        'tx' : (str) hex encoded transaction
+        'txid' : (str) txid of resulting claim
+        'nout' : (int) nout of the resulting claim
+        'fee' : (float) fee paid for the claim transaction
+        'claim_id' : (str) claim ID of the resulting claim
+    }
+
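+Example (sketch): publishing a file with inline metadata and a key fee, assuming a default
+local daemon at `http://localhost:5279`; every value below (name, bid, path, fee, channel)
+is a placeholder.
+
+```python
+# Sketch: publish a file under a claim name with a LBC download fee.
+import requests
+
+params = {
+    "name": "my-claim-name",
+    "bid": 1.0,
+    "file_path": "/path/to/file.mp4",
+    "title": "A title",
+    "description": "A description",
+    "author": "An author",
+    "language": "en",
+    "license": "Public Domain",
+    "nsfw": False,
+    "fee": {"currency": "LBC", "amount": 0.5},
+    "channel_name": "@my-channel",
+}
+resp = requests.post("http://localhost:5279",
+                     json={"method": "publish", "params": params})
+print("claim id:", resp.json()["result"]["claim_id"])
+```
+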
+ +

report_bug

+
Report a bug to slack
+
+Args:
+    'message'  :  (str)  Description of the bug
+
+Returns:
+    (bool) true if successful
+
+ +

resolve

+
Resolve given LBRY URIs
+
+Args:
+    'force' (optional)  :  (bool)  force refresh and ignore cache
+    'uri'               :  (str)   uri to resolve
+    'uris' (optional)   :  (list)  uris to resolve
+
+Returns:
+    Dictionary of results, keyed by uri
+    '<uri>': {
+            If a resolution error occurs:
+            'error': Error message
+
+            If the uri resolves to a channel or a claim in a channel:
+            'certificate': {
+                'address': (str) claim address,
+                'amount': (float) claim amount,
+                'effective_amount': (float) claim amount including supports,
+                'claim_id': (str) claim id,
+                'claim_sequence': (int) claim sequence number,
+                'decoded_claim': (bool) whether or not the claim value was decoded,
+                'height': (int) claim height,
+                'depth': (int) claim depth,
+                'has_signature': (bool) included if decoded_claim
+                'name': (str) claim name,
+                'permanent_url': (str) permanent url of the certificate claim,
+                'supports': (list) list of supports [{'txid': (str) txid,
+                                                     'nout': (int) nout,
+                                                     'amount': (float) amount}],
+                'txid': (str) claim txid,
+                'nout': (str) claim nout,
+                'signature_is_valid': (bool), included if has_signature,
+                'value': ClaimDict if decoded, otherwise hex string
+            }
+
+            If the uri resolves to a channel:
+            'claims_in_channel': (int) number of claims in the channel,
+
+            If the uri resolves to a claim:
+            'claim': {
+                'address': (str) claim address,
+                'amount': (float) claim amount,
+                'effective_amount': (float) claim amount including supports,
+                'claim_id': (str) claim id,
+                'claim_sequence': (int) claim sequence number,
+                'decoded_claim': (bool) whether or not the claim value was decoded,
+                'height': (int) claim height,
+                'depth': (int) claim depth,
+                'has_signature': (bool) included if decoded_claim
+                'name': (str) claim name,
+                'permanent_url': (str) permanent url of the claim,
+                'channel_name': (str) channel name if claim is in a channel
+                'supports': (list) list of supports [{'txid': (str) txid,
+                                                     'nout': (int) nout,
+                                                     'amount': (float) amount}]
+                'txid': (str) claim txid,
+                'nout': (str) claim nout,
+                'signature_is_valid': (bool), included if has_signature,
+                'value': ClaimDict if decoded, otherwise hex string
+            }
+    }
+
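+Example (sketch): resolving a uri and branching on the result shapes described above,
+assuming a default local daemon at `http://localhost:5279`; the uri is a placeholder.
+
+```python
+# Sketch: resolve a uri and handle the error / claim / channel cases.
+import requests
+
+uri = "lbry://some-content"
+resp = requests.post("http://localhost:5279",
+                     json={"method": "resolve", "params": {"uri": uri}})
+result = resp.json()["result"][uri]
+if "error" in result:
+    print("resolution error:", result["error"])
+elif "claim" in result:
+    print("claim id:", result["claim"]["claim_id"])
+elif "certificate" in result:
+    print("channel with", result.get("claims_in_channel", 0), "claims")
+```
+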
+ +

resolve_name

+
Resolve stream info from a LBRY name
+
+Args:
+    'name'              :  (str)   the name to resolve
+    'force' (optional)  :  (bool)  force refresh and do not check cache
+
+Returns:
+    (dict) Metadata dictionary from name claim, None if the name is not
+            resolvable
+
+ +

routing_table_get

+
Get DHT routing information
+
+Args:
+          None
+
+Returns:
+    (dict) dictionary containing routing and contact information
+    {
+        "buckets": {
+            <bucket index>: [
+                {
+                    "address": (str) peer address,
+                    "node_id": (str) peer node id,
+                    "blobs": (list) blob hashes announced by peer
+                }
+            ]
+        },
+        "contacts": (list) contact node ids,
+        "blob_hashes": (list) all of the blob hashes stored by peers in the list of buckets,
+        "node_id": (str) the local dht node id
+    }
+
+ +

settings_get

+
Get daemon settings
+
+Args:
+          None
+
+Returns:
+    (dict) Dictionary of daemon settings
+    See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings
+
+ +

settings_set

+
Set daemon settings
+
+Args:
+    'download_directory' (optional)             :  (str)    path of download directory
+    'data_rate' (optional)                      :  (float)  0.0001
+    'download_timeout' (optional)               :  (int)    180
+    'peer_port' (optional)                      :  (int)    3333
+    'max_key_fee' (optional)                    :  (dict)   maximum key fee for downloads,
+                                                            in the format:
+                                                            {
+                                                            'currency': <currency_symbol>,
+                                                            'amount': <amount>
+                                                            }.
+                                                            In the CLI, it must be an escaped JSON string
+                                                            Supported currency symbols: LBC, USD, BTC
+    'disable_max_key_fee' (optional)            :  (bool)   False
+    'use_upnp' (optional)                       :  (bool)   True
+    'run_reflector_server' (optional)           :  (bool)   False
+    'cache_time' (optional)                     :  (int)    150
+    'reflect_uploads' (optional)                :  (bool)   True
+    'share_usage_data' (optional)               :  (bool)   True
+    'peer_search_timeout' (optional)            :  (int)    3
+    'sd_download_timeout' (optional)            :  (int)    3
+    'auto_renew_claim_height_delta' (optional)  :  (int)    0
+                                                            claims set to expire within this many blocks will be
+                                                            automatically renewed after startup (if set to 0, renews
+                                                            will not be made automatically)
+
+Returns:
+    (dict) Updated dictionary of daemon settings
+
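+Example (sketch): updating `max_key_fee` over JSON-RPC, assuming a default local daemon at
+`http://localhost:5279`; the fee values are placeholders. The escaped-JSON form mentioned
+above applies only to the CLI; over JSON-RPC the dictionary is sent as-is.
+
+```python
+# Sketch: change the maximum key fee and the download timeout.
+import requests
+
+resp = requests.post("http://localhost:5279", json={
+    "method": "settings_set",
+    "params": {"max_key_fee": {"currency": "USD", "amount": 50.0},
+               "download_timeout": 180},
+})
+settings = resp.json()["result"]
+print(settings.get("max_key_fee"), settings.get("download_timeout"))
+```
+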
+ +

status

+
Get daemon status
+
+Args:
+    'session_status' (optional)  :  (bool)  include session status in results
+    'dht_status' (optional)      :  (bool)  include dht network and peer status
+
+Returns:
+    (dict) lbrynet-daemon status
+    {
+        'lbry_id': lbry peer id, base58,
+        'installation_id': installation id, base58,
+        'is_running': bool,
+        'is_first_run': bool,
+        'startup_status': {
+            'code': status code,
+            'message': status message
+        },
+        'connection_status': {
+            'code': connection status code,
+            'message': connection status message
+        },
+        'blockchain_status': {
+            'blocks': local blockchain height,
+            'blocks_behind': remote_height - local_height,
+            'best_blockhash': block hash of most recent block,
+        },
+        'wallet_is_encrypted': bool,
+
+        If given the session status option:
+            'session_status': {
+                'managed_blobs': count of blobs in the blob manager,
+                'managed_streams': count of streams in the file manager
+                'announce_queue_size': number of blobs currently queued to be announced
+                'should_announce_blobs': number of blobs that should be announced
+            }
+
+        If given the dht status option:
+            'dht_status': {
+                'kbps_received': current kbps receiving,
+                'kbps_sent': current kbps being sent,
+                'total_bytes_sent': total bytes sent,
+                'total_bytes_received': total bytes received,
+                'queries_received': number of queries received per second,
+                'queries_sent': number of queries sent per second,
+                'recent_contacts': count of recently contacted peers,
+                'unique_contacts': count of unique peers
+            },
+    }
+
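+Example (sketch): polling `status` until the daemon reports it is running, assuming a default
+local daemon at `http://localhost:5279`.
+
+```python
+# Sketch: wait for startup to finish, then report how far the wallet is behind.
+import time
+import requests
+
+while True:
+    try:
+        resp = requests.post("http://localhost:5279",
+                             json={"method": "status", "params": {}})
+        status = resp.json()["result"]
+    except requests.ConnectionError:
+        time.sleep(1)  # daemon is not listening yet
+        continue
+    if status["is_running"]:
+        break
+    print("waiting:", status["startup_status"]["message"])
+    time.sleep(1)
+print("blocks behind:", status["blockchain_status"]["blocks_behind"])
+```
+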
+ +

stream_availability

+
Get stream availability for lbry uri
+
+Args:
+    'uri'                        :  (str)  check availability for this uri
+    'search_timeout' (optional)  :  (int)  how long to search for peers for the blob
+                                           in the dht
+    'blob_timeout' (optional)    :  (int)  how long to try downloading from a peer
+
+Returns:
+    (dict) {
+        'is_available': <bool>,
+        'did_decode': <bool>,
+        'did_resolve': <bool>,
+        'is_stream': <bool>,
+        'num_blobs_in_stream': <int>,
+        'sd_hash': <str>,
+        'sd_blob_availability': <dict> see `blob_availability`,
+        'head_blob_hash': <str>,
+        'head_blob_availability': <dict> see `blob_availability`,
+        'use_upnp': <bool>,
+        'upnp_redirect_is_set': <bool>,
+        'error': <None> | <str> error message
+    }
+
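+Example (sketch): using `stream_availability` to diagnose a stalled download, assuming a
+default local daemon at `http://localhost:5279`; the uri is a placeholder.
+
+```python
+# Sketch: check whether a stream's sd blob and head blob are reachable.
+import requests
+
+resp = requests.post("http://localhost:5279", json={
+    "method": "stream_availability",
+    "params": {"uri": "lbry://some-content", "search_timeout": 5},
+})
+info = resp.json()["result"]
+if not info["is_available"]:
+    print("unavailable:", info.get("error"))
+    print("sd blob peers:", info["sd_blob_availability"].get("reachable_peers"))
+    print("head blob peers:", info["head_blob_availability"].get("reachable_peers"))
+```
+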
+ +

stream_cost_estimate

+
Get estimated cost for a lbry stream
+
+Args:
+    'uri'              :  (str)    uri to use
+    'size' (optional)  :  (float)  stream size in bytes. If provided, an sd blob won't be
+                                   downloaded.
+
+Returns:
+    (float) Estimated cost in lbry credits, returns None if uri is not
+        resolvable
+
+ +

transaction_list

+
List transactions belonging to wallet
+
+Args:
+          None
+
+Returns:
+    (list) List of transactions
+
+    {
+        "claim_info": (list) claim info if in txn [{
+                                                "address": (str) address of claim,
+                                                "balance_delta": (float) bid amount,
+                                                "amount": (float) claim amount,
+                                                "claim_id": (str) claim id,
+                                                "claim_name": (str) claim name,
+                                                "nout": (int) nout
+                                                }],
+        "abandon_info": (list) abandon info if in txn [{
+                                                "address": (str) address of abandoned claim,
+                                                "balance_delta": (float) returned amount,
+                                                "amount": (float) claim amount,
+                                                "claim_id": (str) claim id,
+                                                "claim_name": (str) claim name,
+                                                "nout": (int) nout
+                                                }],
+        "confirmations": (int) number of confirmations for the txn,
+        "date": (str) date and time of txn,
+        "fee": (float) txn fee,
+        "support_info": (list) support info if in txn [{
+                                                "address": (str) address of support,
+                                                "balance_delta": (float) support amount,
+                                                "amount": (float) support amount,
+                                                "claim_id": (str) claim id,
+                                                "claim_name": (str) claim name,
+                                                "is_tip": (bool),
+                                                "nout": (int) nout
+                                                }],
+        "timestamp": (int) timestamp,
+        "txid": (str) txn id,
+        "update_info": (list) update info if in txn [{
+                                                "address": (str) address of claim,
+                                                "balance_delta": (float) credited/debited
+                                                "amount": (float) absolute amount,
+                                                "claim_id": (str) claim id,
+                                                "claim_name": (str) claim name,
+                                                "nout": (int) nout
+                                                }],
+        "value": (float) value of txn
+    }
+
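+Example (sketch): listing tips received, using the `support_info` entries described above and
+assuming a default local daemon at `http://localhost:5279`.
+
+```python
+# Sketch: print every incoming tip recorded in the wallet's transactions.
+import requests
+
+resp = requests.post("http://localhost:5279",
+                     json={"method": "transaction_list", "params": {}})
+for txn in resp.json()["result"]:
+    for support in txn["support_info"]:
+        if support["is_tip"]:
+            print(txn["txid"], support["claim_name"], support["amount"])
+```
+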
+ +

transaction_show

+
Get a decoded transaction from a txid
+
+Args:
+    'txid'  :  (str)  txid of the transaction
+
+Returns:
+    (dict) JSON formatted transaction
+
+ +

utxo_list

+
List unspent transaction outputs
+
+Args:
+          None
+
+Returns:
+    (list) List of unspent transaction outputs (UTXOs)
+    [
+        {
+            "address": (str) the output address
+            "amount": (float) unspent amount
+            "height": (int) block height
+            "is_claim": (bool) is the tx a claim
+            "is_coinbase": (bool) is the tx a coinbase tx
+            "is_support": (bool) is the tx a support
+            "is_update": (bool) is the tx an update
+            "nout": (int) nout of the output
+            "txid": (str) txid of the output
+        },
+        ...
+    ]
+
+ +

version

+
Get lbry version information
+
+Args:
+          None
+
+Returns:
+    (dict) Dictionary of lbry version information
+    {
+        'build': (str) build type (e.g. "dev", "rc", "release"),
+        'ip': (str) remote ip, if available,
+        'lbrynet_version': (str) lbrynet_version,
+        'lbryum_version': (str) lbryum_version,
+        'lbryschema_version': (str) lbryschema_version,
+        'os_release': (str) os release string
+        'os_system': (str) os name
+        'platform': (str) platform string
+        'processor': (str) processor type,
+        'python_version': (str) python version,
+    }
+
+ +

wallet_balance

+
Return the balance of the wallet
+
+Args:
+    'address' (optional)              :  (str)   If provided only the balance for this
+                                                 address will be given
+    'include_unconfirmed' (optional)  :  (bool)  Include unconfirmed
+
+Returns:
+    (float) amount of lbry credits in wallet
+
+ +

wallet_decrypt

+
+Decrypt an encrypted wallet; this will remove the wallet password
+
+Args:
+          None
+
+Returns:
+    (bool) true if wallet is decrypted, otherwise false
+
+ +

wallet_encrypt

+
+Encrypt a wallet with a password; if the wallet is already encrypted, this will update
+the password
+
+Args:
+    'new_password'  :  (str)  password string to be used for encrypting wallet
+
+Returns:
+    (bool) true if wallet is encrypted, otherwise false
+
+ +

wallet_is_address_mine

+
+Check if an address is associated with the current wallet.
+
+Args:
+    'address'  :  (str)  address to check
+
+Returns:
+    (bool) true, if address is associated with current wallet
+
+ +

wallet_list

+
List wallet addresses
+
+Args:
+          None
+
+Returns:
+    List of wallet addresses
+
+ +

wallet_new_address

+
Generate a new wallet address
+
+Args:
+          None
+
+Returns:
+    (str) New wallet address in base58
+
+ +

wallet_prefill_addresses

+
Create new addresses, each containing `amount` credits
+
+Args:
+    'no_broadcast' (optional)  :  (bool)   whether to broadcast or not
+    'num_addresses'            :  (int)    num of addresses to create
+    'amount'                   :  (float)  initial amount in each address
+
+Returns:
+    (dict) the resulting transaction
+
+ +

wallet_public_key

+
Get public key from wallet address
+
+Args:
+    'address'  :  (str)  address for which to get the public key
+
+Returns:
+    (list) list of public keys associated with address.
+        Could contain more than one public key if multisig.
+
+ +

wallet_send

+
Send credits. If given an address, send credits to it. If given a claim id, send a tip
+to the owner of the claim with that claim id. A tip is a claim support where the recipient
+of the support is the claim address for the claim being supported.
+
+Args:
+    'amount'    :  (float)  amount of credit to send
+    'address'   :  (str)    address to send credits to
+    'claim_id'  :  (str)    claim_id of the claim to send a tip to
+
+Returns:
+    If sending to an address:
+    (bool) true if payment successfully scheduled
+
+    If sending a claim tip:
+    (dict) Dictionary containing the result of the support
+    {
+        txid : (str) txid of resulting support claim
+        nout : (int) nout of the resulting support claim
+        fee : (float) fee paid for the transaction
+    }
+
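+Example (sketch): tipping the owner of a claim, assuming a default local daemon at
+`http://localhost:5279`; the claim id and amount are placeholders.
+
+```python
+# Sketch: send a 1 LBC tip to a claim (the claim id is a placeholder).
+import requests
+
+resp = requests.post("http://localhost:5279", json={
+    "method": "wallet_send",
+    "params": {"amount": 1.0,
+               "claim_id": "d8cac51e928d7c7d3b6d5dd1a4268be57c8e2d1d"},
+})
+tip = resp.json()["result"]
+print("tip sent in", tip["txid"], "nout", tip["nout"])
+```
+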
+ +

wallet_unlock

+
Unlock an encrypted wallet
+
+Args:
+    'password'  :  (str)  password for unlocking wallet
+
+Returns:
+    (bool) true if wallet is unlocked, otherwise false
+
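+Example (sketch): unlocking an encrypted wallet during startup, assuming a default local
+daemon at `http://localhost:5279`; in real use the password should come from somewhere safer
+than a string literal.
+
+```python
+# Sketch: unlock the wallet so startup can continue.
+import requests
+
+resp = requests.post("http://localhost:5279", json={
+    "method": "wallet_unlock",
+    "params": {"password": "correct horse battery staple"},
+})
+print("unlocked:", resp.json()["result"])
+```
+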
+ +

wallet_unused_address

+
+Return an address containing no balance; a new address will be created
+if there is none.
+
+Args:
+          None
+
+Returns:
+    (str) Unused wallet address in base58
+
+ + + + + + + +
+
+
+
+ + + + +
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/index.md b/docs/index.md deleted file mode 100644 index a0f391ca9..000000000 --- a/docs/index.md +++ /dev/null @@ -1,1215 +0,0 @@ -# LBRY JSON-RPC API Documentation - -## blob_announce - -```text -Announce blobs to the DHT - -Args: - 'announce_all' (optional) : (bool) announce all the blobs possessed by user - 'blob_hash' (optional) : (str) announce a blob, specified by blob_hash - 'stream_hash' (optional) : (str) announce all blobs associated with - stream_hash - 'sd_hash' (optional) : (str) announce all blobs associated with - sd_hash and the sd_hash itself - -Returns: - (bool) true if successful -``` - -## blob_availability - -```text -Get blob availability - -Args: - 'blob_hash' (optional) : (str) check availability for this blob hash - 'search_timeout' (optional) : (int) how long to search for peers for the blob - in the dht - 'blob_timeout' (optional) : (int) how long to try downloading from a peer - -Returns: - (dict) { - "is_available": - "reachable_peers": [":"], - "unreachable_peers": [":"] - } -``` - -## blob_delete - -```text -Delete a blob - -Args: - 'blob_hash' (optional) : (str) blob hash of the blob to delete - -Returns: - (str) Success/fail message -``` - -## blob_get - -```text -Download and return a blob - -Args: - 'blob_hash' : (str) blob hash of the blob to get - 'timeout' (optional) : (int) timeout in number of seconds - 'encoding' (optional) : (str) by default no attempt at decoding - is made, can be set to one of the - following decoders: - 'json' - 'payment_rate_manager' (optional) : (str) if not given the default payment rate - manager will be used. - supported alternative rate managers: - 'only-free' - -Returns: - (str) Success/Fail message or (dict) decoded data -``` - -## blob_list - -```text -Returns blob hashes. If not given filters, returns all blobs known by the blob manager - -Args: - 'needed' (optional) : (bool) only return needed blobs - 'finished' (optional) : (bool) only return finished blobs - 'uri' (optional) : (str) filter blobs by stream in a uri - 'stream_hash' (optional) : (str) filter blobs by stream hash - 'sd_hash' (optional) : (str) filter blobs by sd hash - 'page_size' (optional) : (int) results page size - 'page' (optional) : (int) page of results to return - -Returns: - (list) List of blob hashes -``` - -## blob_reflect_all - -```text -Reflects all saved blobs - -Args: - None - -Returns: - (bool) true if successful -``` - -## block_show - -```text -Get contents of a block - -Args: - 'blockhash' : (str) hash of the block to look up - 'height' : (int) height of the block to look up - -Returns: - (dict) Requested block -``` - -## channel_export - -```text -Export serialized channel signing information for a given certificate claim id - -Args: - 'claim_id' : (str) Claim ID to export information about - -Returns: - (str) Serialized certificate information -``` - -## channel_import - -```text -Import serialized channel signing information (to allow signing new claims to the channel) - -Args: - 'serialized_certificate_info' (optional) : (str) certificate info - -Returns: - (dict) Result dictionary -``` - -## channel_list - -```text -Get certificate claim infos for channels that can be published to - -Args: - None - -Returns: - (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim - is in the wallet. 
-``` - -## channel_new - -```text -Generate a publisher key and create a new '@' prefixed certificate claim - -Args: - 'channel_name' : (str) name of the channel prefixed with '@' - 'amount' : (float) bid amount on the channel - -Returns: - (dict) Dictionary containing result of the claim - { - 'tx' : (str) hex encoded transaction - 'txid' : (str) txid of resulting claim - 'nout' : (int) nout of the resulting claim - 'fee' : (float) fee paid for the claim transaction - 'claim_id' : (str) claim ID of the resulting claim - } -``` - -## claim_abandon - -```text -Abandon a name and reclaim credits from the claim - -Args: - 'claim_id' (optional) : (str) claim_id of the claim to abandon - 'txid' (optional) : (str) txid of the claim to abandon - 'nout' (optional) : (int) nout of the claim to abandon - -Returns: - (dict) Dictionary containing result of the claim - { - txid : (str) txid of resulting transaction - fee : (float) fee paid for the transaction - } -``` - -## claim_list - -```text -List current claims and information about them for a given name - -Args: - 'name' : (str) name of the claim to list info about - -Returns: - (dict) State of claims assigned for the name - { - 'claims': (list) list of claims for the name - [ - { - 'amount': (float) amount assigned to the claim - 'effective_amount': (float) total amount assigned to the claim, - including supports - 'claim_id': (str) claim ID of the claim - 'height': (int) height of block containing the claim - 'txid': (str) txid of the claim - 'nout': (int) nout of the claim - 'permanent_url': (str) permanent url of the claim, - 'supports': (list) a list of supports attached to the claim - 'value': (str) the value of the claim - }, - ] - 'supports_without_claims': (list) supports without any claims attached to them - 'last_takeover_height': (int) the height of last takeover for the name - } -``` - -## claim_list_by_channel - -```text -Get paginated claims in a channel specified by a channel uri - -Args: - 'uri' : (str) uri of the channel - 'uris' (optional) : (list) uris of the channel - 'page' (optional) : (int) which page of results to return where page 1 is the first - page, defaults to no pages - 'page_size' (optional) : (int) number of results in a page, default of 10 - -Returns: - { - resolved channel uri: { - If there was an error: - 'error': (str) error message - - 'claims_in_channel': the total number of results for the channel, - - If a page of results was requested: - 'returned_page': page number returned, - 'claims_in_channel': [ - { - 'absolute_channel_position': (int) claim index number in sorted list of - claims which assert to be part of the - channel - 'address': (str) claim address, - 'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number, - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'supports: (list) list of supports [{'txid': (str) txid, - 'nout': (int) nout, - 'amount': (float) amount}], - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - ], - } - } -``` - -## claim_list_mine - -```text -List my name claims - -Args: - None - -Returns: - (list) List of name claims owned by user - [ - { - 'address': (str) address that owns 
the claim - 'amount': (float) amount assigned to the claim - 'blocks_to_expiration': (int) number of blocks until it expires - 'category': (str) "claim", "update" , or "support" - 'claim_id': (str) claim ID of the claim - 'confirmations': (int) number of blocks of confirmations for the claim - 'expiration_height': (int) the block height which the claim will expire - 'expired': (bool) true if expired, false otherwise - 'height': (int) height of the block containing the claim - 'is_spent': (bool) true if claim is abandoned, false otherwise - 'name': (str) name of the claim - 'permanent_url': (str) permanent url of the claim, - 'txid': (str) txid of the cliam - 'nout': (int) nout of the claim - 'value': (str) value of the claim - }, - ] -``` - -## claim_new_support - -```text -Support a name claim - -Args: - 'name' : (str) name of the claim to support - 'claim_id' : (str) claim_id of the claim to support - 'amount' : (float) amount of support - -Returns: - (dict) Dictionary containing result of the claim - { - txid : (str) txid of resulting support claim - nout : (int) nout of the resulting support claim - fee : (float) fee paid for the transaction - } -``` - -## claim_renew - -```text -Renew claim(s) or support(s) - -Args: - 'outpoint' : (str) outpoint of the claim to renew - 'height' : (str) update claims expiring before or at this block height - -Returns: - (dict) Dictionary where key is the the original claim's outpoint and - value is the result of the renewal - { - outpoint:{ - - 'tx' : (str) hex encoded transaction - 'txid' : (str) txid of resulting claim - 'nout' : (int) nout of the resulting claim - 'fee' : (float) fee paid for the claim transaction - 'claim_id' : (str) claim ID of the resulting claim - }, - } -``` - -## claim_send_to_address - -```text -Send a name claim to an address - -Args: - 'claim_id' : (str) claim_id to send - 'address' : (str) address to send the claim to - 'amount' (optional) : (int) Amount of credits to claim name for, defaults to the current amount - on the claim - -Returns: - (dict) Dictionary containing result of the claim - { - 'tx' : (str) hex encoded transaction - 'txid' : (str) txid of resulting claim - 'nout' : (int) nout of the resulting claim - 'fee' : (float) fee paid for the claim transaction - 'claim_id' : (str) claim ID of the resulting claim - } -``` - -## claim_show - -```text -Resolve claim info from txid/nout or with claim ID - -Args: - 'txid' (optional) : (str) look for claim with this txid, nout must - also be specified - 'nout' (optional) : (int) look for claim with this nout, txid must - also be specified - 'claim_id' (optional) : (str) look for claim with this claim id - -Returns: - (dict) Dictionary containing claim info as below, - - { - 'txid': (str) txid of claim - 'nout': (int) nout of claim - 'amount': (float) amount of claim - 'value': (str) value of claim - 'height' : (int) height of claim takeover - 'claim_id': (str) claim ID of claim - 'supports': (list) list of supports associated with claim - } - - if claim cannot be resolved, dictionary as below will be returned - - { - 'error': (str) reason for error - } -``` - -## cli_test_command - -```text -This command is only for testing the CLI argument parsing -Args: - 'a_arg' (optional) : a arg - 'b_arg' (optional) : b arg - 'pos_arg' : pos arg - 'pos_args' (optional) : pos args - 'pos_arg2' (optional) : pos arg 2 - 'pos_arg3' (optional) : pos arg 3 - -Returns: - pos args -``` - -## commands - -```text -Return a list of available commands - -Args: - None - -Returns: - (list) list 
of available commands -``` - -## daemon_stop - -```text -Stop lbrynet-daemon - -Args: - None - -Returns: - (string) Shutdown message -``` - -## file_delete - -```text -Delete a LBRY file - -Args: - 'delete_from_download_dir' (optional) : (bool) delete file from download directory, - instead of just deleting blobs - 'delete_all' (optional) : (bool) if there are multiple matching files, - allow the deletion of multiple files. - Otherwise do not delete anything. - 'sd_hash' (optional) : (str) delete by file sd hash - 'file_name' (optional) : (str) delete by file name in downloads folder - 'stream_hash' (optional) : (str) delete by file stream hash - 'rowid' (optional) : (int) delete by file row id - 'claim_id' (optional) : (str) delete by file claim id - 'txid' (optional) : (str) delete by file claim txid - 'nout' (optional) : (int) delete by file claim nout - 'claim_name' (optional) : (str) delete by file claim name - 'channel_claim_id' (optional) : (str) delete by file channel claim id - 'channel_name' (optional) : (str) delete by file channel claim name - -Returns: - (bool) true if deletion was successful -``` - -## file_list - -```text -List files limited by optional filters - -Args: - 'sd_hash' (optional) : (str) get file with matching sd hash - 'file_name' (optional) : (str) get file with matching file name in the - downloads folder - 'stream_hash' (optional) : (str) get file with matching stream hash - 'rowid' (optional) : (int) get file with matching row id - 'claim_id' (optional) : (str) get file with matching claim id - 'outpoint' (optional) : (str) get file with matching claim outpoint - 'txid' (optional) : (str) get file with matching claim txid - 'nout' (optional) : (int) get file with matching claim nout - 'channel_claim_id' (optional) : (str) get file with matching channel claim id - 'channel_name' (optional) : (str) get file with matching channel name - 'claim_name' (optional) : (str) get file with matching claim name - 'full_status' (optional) : (bool) full status, populate the - 'message' and 'size' fields - -Returns: - (list) List of files - - [ - { - 'completed': (bool) true if download is completed, - 'file_name': (str) name of file, - 'download_directory': (str) download directory, - 'points_paid': (float) credit paid to download file, - 'stopped': (bool) true if download is stopped, - 'stream_hash': (str) stream hash of file, - 'stream_name': (str) stream name , - 'suggested_file_name': (str) suggested file name, - 'sd_hash': (str) sd hash of file, - 'download_path': (str) download path of file, - 'mime_type': (str) mime type of file, - 'key': (str) key attached to file, - 'total_bytes': (int) file size in bytes, None if full_status is false, - 'written_bytes': (int) written size in bytes, - 'blobs_completed': (int) num_completed, None if full_status is false, - 'blobs_in_stream': (int) None if full_status is false, - 'status': (str) downloader status, None if full_status is false, - 'claim_id': (str) None if full_status is false or if claim is not found, - 'outpoint': (str) None if full_status is false or if claim is not found, - 'txid': (str) None if full_status is false or if claim is not found, - 'nout': (int) None if full_status is false or if claim is not found, - 'metadata': (dict) None if full_status is false or if claim is not found, - 'channel_claim_id': (str) None if full_status is false or if claim is not found or signed, - 'channel_name': (str) None if full_status is false or if claim is not found or signed, - 'claim_name': (str) None if full_status is 
false or if claim is not found - }, - ] -``` - -## file_reflect - -```text -Reflect all the blobs in a file matching the filter criteria - -Args: - 'sd_hash' (optional) : (str) get file with matching sd hash - 'file_name' (optional) : (str) get file with matching file name in the - downloads folder - 'stream_hash' (optional) : (str) get file with matching stream hash - 'rowid' (optional) : (int) get file with matching row id - 'reflector' (optional) : (str) reflector server, ip address or url - by default choose a server from the config - -Returns: - (list) list of blobs reflected -``` - -## file_set_status - -```text -Start or stop downloading a file - -Args: - 'status' : (str) one of "start" or "stop" - 'sd_hash' (optional) : (str) set status of file with matching sd hash - 'file_name' (optional) : (str) set status of file with matching file name in the - downloads folder - 'stream_hash' (optional) : (str) set status of file with matching stream hash - 'rowid' (optional) : (int) set status of file with matching row id - -Returns: - (str) Confirmation message -``` - -## get - -```text -Download stream from a LBRY name. - -Args: - 'uri' (optional) : (str) uri of the content to download - 'file_name' (optional) : (str) specified name for the downloaded file - 'timeout' (optional) : (int) download timeout in number of seconds - -Returns: - (dict) Dictionary containing information about the stream - { - 'completed': (bool) true if download is completed, - 'file_name': (str) name of file, - 'download_directory': (str) download directory, - 'points_paid': (float) credit paid to download file, - 'stopped': (bool) true if download is stopped, - 'stream_hash': (str) stream hash of file, - 'stream_name': (str) stream name , - 'suggested_file_name': (str) suggested file name, - 'sd_hash': (str) sd hash of file, - 'download_path': (str) download path of file, - 'mime_type': (str) mime type of file, - 'key': (str) key attached to file, - 'total_bytes': (int) file size in bytes, None if full_status is false, - 'written_bytes': (int) written size in bytes, - 'blobs_completed': (int) num_completed, None if full_status is false, - 'blobs_in_stream': (int) None if full_status is false, - 'status': (str) downloader status, None if full_status is false, - 'claim_id': (str) claim id, - 'outpoint': (str) claim outpoint string, - 'txid': (str) claim txid, - 'nout': (int) claim nout, - 'metadata': (dict) claim metadata, - 'channel_claim_id': (str) None if claim is not signed - 'channel_name': (str) None if claim is not signed - 'claim_name': (str) claim name - } -``` - -## help - -```text -Return a useful message for an API command - -Args: - 'command' (optional) : (str) command to retrieve documentation for - -Returns: - (str) Help message -``` - -## peer_list - -```text -Get peers for blob hash - -Args: - 'blob_hash' : (str) find available peers for this blob hash - 'timeout' (optional) : (int) peer search timeout in seconds - -Returns: - (list) List of contacts -``` - -## publish - -```text -Make a new name claim and publish associated data to lbrynet, -update over existing claim if user already has a claim for name. - -Fields required in the final Metadata are: - 'title' - 'description' - 'author' - 'language' - 'license' - 'nsfw' - -Metadata can be set by either using the metadata argument or by setting individual arguments -fee, title, description, author, language, license, license_url, thumbnail, preview, nsfw, -or sources. Individual arguments will overwrite the fields specified in metadata argument. 
- -Args: - 'name' : (str) name of the content - 'bid' : (float) amount to back the claim - 'metadata' (optional) : (dict) ClaimDict to associate with the claim. - 'file_path' (optional) : (str) path to file to be associated with name. If provided, - a lbry stream of this file will be used in 'sources'. - If no path is given but a sources dict is provided, - it will be used. If neither are provided, an - error is raised. - 'fee' (optional) : (dict) Dictionary representing key fee to download content: - { - 'currency': currency_symbol, - 'amount': float, - 'address': str, optional - } - supported currencies: LBC, USD, BTC - If an address is not provided a new one will be - automatically generated. Default fee is zero. - 'title' (optional) : (str) title of the publication - 'description' (optional) : (str) description of the publication - 'author' (optional) : (str) author of the publication - 'language' (optional) : (str) language of the publication - 'license' (optional) : (str) publication license - 'license_url' (optional) : (str) publication license url - 'thumbnail' (optional) : (str) thumbnail url - 'preview' (optional) : (str) preview url - 'nsfw' (optional) : (bool) title of the publication - 'sources' (optional) : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file - 'channel_name' (optional) : (str) name of the publisher channel name in the wallet - 'channel_id' (optional) : (str) claim id of the publisher channel, does not check - for channel claim being in the wallet. This allows - publishing to a channel where only the certificate - private key is in the wallet. - 'claim_address' (optional) : (str) address where the claim is sent to, if not specified - new address wil automatically be created - -Returns: - (dict) Dictionary containing result of the claim - { - 'tx' : (str) hex encoded transaction - 'txid' : (str) txid of resulting claim - 'nout' : (int) nout of the resulting claim - 'fee' : (float) fee paid for the claim transaction - 'claim_id' : (str) claim ID of the resulting claim - } -``` - -## report_bug - -```text -Report a bug to slack - -Args: - 'message' : (str) Description of the bug - -Returns: - (bool) true if successful -``` - -## resolve - -```text -Resolve given LBRY URIs - -Args: - 'force' (optional) : (bool) force refresh and ignore cache - 'uri' : (str) uri to resolve - 'uris' (optional) : (list) uris to resolve - -Returns: - Dictionary of results, keyed by uri - '': { - If a resolution error occurs: - 'error': Error message - - If the uri resolves to a channel or a claim in a channel: - 'certificate': { - 'address': (str) claim address, - 'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number, - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'permanent_url': (str) permanent url of the certificate claim, - 'supports: (list) list of supports [{'txid': (str) txid, - 'nout': (int) nout, - 'amount': (float) amount}], - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - - If the uri resolves to a channel: - 'claims_in_channel': (int) number of claims in the channel, - - If the uri resolves to a claim: - 'claim': { - 'address': (str) claim address, - 'amount': 
(float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number, - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'permanent_url': (str) permanent url of the claim, - 'channel_name': (str) channel name if claim is in a channel - 'supports: (list) list of supports [{'txid': (str) txid, - 'nout': (int) nout, - 'amount': (float) amount}] - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - } -``` - -## resolve_name - -```text -Resolve stream info from a LBRY name - -Args: - 'name' : (str) the name to resolve - 'force' (optional) : (bool) force refresh and do not check cache - -Returns: - (dict) Metadata dictionary from name claim, None if the name is not - resolvable -``` - -## routing_table_get - -```text -Get DHT routing information - -Args: - None - -Returns: - (dict) dictionary containing routing and contact information - { - "buckets": { - : [ - { - "address": (str) peer address, - "node_id": (str) peer node id, - "blobs": (list) blob hashes announced by peer - } - ] - }, - "contacts": (list) contact node ids, - "blob_hashes": (list) all of the blob hashes stored by peers in the list of buckets, - "node_id": (str) the local dht node id - } -``` - -## settings_get - -```text -Get daemon settings - -Args: - None - -Returns: - (dict) Dictionary of daemon settings - See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings -``` - -## settings_set - -```text -Set daemon settings - -Args: - 'download_directory' (optional) : (str) path of download directory - 'data_rate' (optional) : (float) 0.0001 - 'download_timeout' (optional) : (int) 180 - 'peer_port' (optional) : (int) 3333 - 'max_key_fee' (optional) : (dict) maximum key fee for downloads, - in the format: - { - 'currency': , - 'amount': - }. 
- In the CLI, it must be an escaped JSON string - Supported currency symbols: LBC, USD, BTC - 'disable_max_key_fee' (optional) : (bool) False - 'use_upnp' (optional) : (bool) True - 'run_reflector_server' (optional) : (bool) False - 'cache_time' (optional) : (int) 150 - 'reflect_uploads' (optional) : (bool) True - 'share_usage_data' (optional) : (bool) True - 'peer_search_timeout' (optional) : (int) 3 - 'sd_download_timeout' (optional) : (int) 3 - 'auto_renew_claim_height_delta' (optional) : (int) 0 - claims set to expire within this many blocks will be - automatically renewed after startup (if set to 0, renews - will not be made automatically) - -Returns: - (dict) Updated dictionary of daemon settings -``` - -## status - -```text -Get daemon status - -Args: - 'session_status' (optional) : (bool) include session status in results - 'dht_status' (optional) : (bool) include dht network and peer status - -Returns: - (dict) lbrynet-daemon status - { - 'lbry_id': lbry peer id, base58, - 'installation_id': installation id, base58, - 'is_running': bool, - 'is_first_run': bool, - 'startup_status': { - 'code': status code, - 'message': status message - }, - 'connection_status': { - 'code': connection status code, - 'message': connection status message - }, - 'blockchain_status': { - 'blocks': local blockchain height, - 'blocks_behind': remote_height - local_height, - 'best_blockhash': block hash of most recent block, - }, - 'wallet_is_encrypted': bool, - - If given the session status option: - 'session_status': { - 'managed_blobs': count of blobs in the blob manager, - 'managed_streams': count of streams in the file manager - 'announce_queue_size': number of blobs currently queued to be announced - 'should_announce_blobs': number of blobs that should be announced - } - - If given the dht status option: - 'dht_status': { - 'kbps_received': current kbps receiving, - 'kbps_sent': current kdps being sent, - 'total_bytes_sent': total bytes sent, - 'total_bytes_received': total bytes received, - 'queries_received': number of queries received per second, - 'queries_sent': number of queries sent per second, - 'recent_contacts': count of recently contacted peers, - 'unique_contacts': count of unique peers - }, - } -``` - -## stream_availability - -```text -Get stream availability for lbry uri - -Args: - 'uri' : (str) check availability for this uri - 'search_timeout' (optional) : (int) how long to search for peers for the blob - in the dht - 'search_timeout' (optional) : (int) how long to try downloading from a peer - -Returns: - (dict) { - 'is_available': , - 'did_decode': , - 'did_resolve': , - 'is_stream': , - 'num_blobs_in_stream': , - 'sd_hash': , - 'sd_blob_availability': see `blob_availability`, - 'head_blob_hash': , - 'head_blob_availability': see `blob_availability`, - 'use_upnp': , - 'upnp_redirect_is_set': , - 'error': | error message - } -``` - -## stream_cost_estimate - -```text -Get estimated cost for a lbry stream - -Args: - 'uri' : (str) uri to use - 'size' (optional) : (float) stream size in bytes. if provided an sd blob won't be - downloaded. 
- -Returns: - (float) Estimated cost in lbry credits, returns None if uri is not - resolvable -``` - -## transaction_list - -```text -List transactions belonging to wallet - -Args: - None - -Returns: - (list) List of transactions - - { - "claim_info": (list) claim info if in txn [{ - "address": (str) address of claim, - "balance_delta": (float) bid amount, - "amount": (float) claim amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "abandon_info": (list) abandon info if in txn [{ - "address": (str) address of abandoned claim, - "balance_delta": (float) returned amount, - "amount": (float) claim amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "confirmations": (int) number of confirmations for the txn, - "date": (str) date and time of txn, - "fee": (float) txn fee, - "support_info": (list) support info if in txn [{ - "address": (str) address of support, - "balance_delta": (float) support amount, - "amount": (float) support amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "is_tip": (bool), - "nout": (int) nout - }], - "timestamp": (int) timestamp, - "txid": (str) txn id, - "update_info": (list) update info if in txn [{ - "address": (str) address of claim, - "balance_delta": (float) credited/debited - "amount": (float) absolute amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "value": (float) value of txn - } -``` - -## transaction_show - -```text -Get a decoded transaction from a txid - -Args: - 'txid' : (str) txid of the transaction - -Returns: - (dict) JSON formatted transaction -``` - -## utxo_list - -```text -List unspent transaction outputs - -Args: - None - -Returns: - (list) List of unspent transaction outputs (UTXOs) - [ - { - "address": (str) the output address - "amount": (float) unspent amount - "height": (int) block height - "is_claim": (bool) is the tx a claim - "is_coinbase": (bool) is the tx a coinbase tx - "is_support": (bool) is the tx a support - "is_update": (bool) is the tx an update - "nout": (int) nout of the output - "txid": (str) txid of the output - }, - ... - ] -``` - -## version - -```text -Get lbry version information - -Args: - None - -Returns: - (dict) Dictionary of lbry version information - { - 'build': (str) build type (e.g. 
"dev", "rc", "release"), - 'ip': (str) remote ip, if available, - 'lbrynet_version': (str) lbrynet_version, - 'lbryum_version': (str) lbryum_version, - 'lbryschema_version': (str) lbryschema_version, - 'os_release': (str) os release string - 'os_system': (str) os name - 'platform': (str) platform string - 'processor': (str) processor type, - 'python_version': (str) python version, - } -``` - -## wallet_balance - -```text -Return the balance of the wallet - -Args: - 'address' (optional) : (str) If provided only the balance for this - address will be given - 'include_unconfirmed' (optional) : (bool) Include unconfirmed - -Returns: - (float) amount of lbry credits in wallet -``` - -## wallet_decrypt - -```text -Decrypt an encrypted wallet, this will remove the wallet password - -Args: - None - -Returns: - (bool) true if wallet is decrypted, otherwise false -``` - -## wallet_encrypt - -```text -Encrypt a wallet with a password, if the wallet is already encrypted this will update -the password - -Args: - 'new_password' : (str) password string to be used for encrypting wallet - -Returns: - (bool) true if wallet is decrypted, otherwise false -``` - -## wallet_is_address_mine - -```text -Checks if an address is associated with the current wallet. - -Args: - 'address' : (str) address to check - -Returns: - (bool) true, if address is associated with current wallet -``` - -## wallet_list - -```text -List wallet addresses - -Args: - None - -Returns: - List of wallet addresses -``` - -## wallet_new_address - -```text -Generate a new wallet address - -Args: - None - -Returns: - (str) New wallet address in base58 -``` - -## wallet_prefill_addresses - -```text -Create new addresses, each containing `amount` credits - -Args: - 'no_broadcast' (optional) : (bool) whether to broadcast or not - 'num_addresses' : (int) num of addresses to create - 'amount' : (float) initial amount in each address - -Returns: - (dict) the resulting transaction -``` - -## wallet_public_key - -```text -Get public key from wallet address - -Args: - 'address' : (str) address for which to get the public key - -Returns: - (list) list of public keys associated with address. - Could contain more than one public key if multisig. -``` - -## wallet_send - -```text -Send credits. If given an address, send credits to it. If given a claim id, send a tip -to the owner of a claim specified by uri. A tip is a claim support where the recipient -of the support is the claim address for the claim being supported. - -Args: - 'amount' : (float) amount of credit to send - 'address' : (str) address to send credits to - 'claim_id' : (float) claim_id of the claim to send to tip to - -Returns: - If sending to an address: - (bool) true if payment successfully scheduled - - If sending a claim tip: - (dict) Dictionary containing the result of the support - { - txid : (str) txid of resulting support claim - nout : (int) nout of the resulting support claim - fee : (float) fee paid for the transaction - } -``` - -## wallet_unlock - -```text -Unlock an encrypted wallet - -Args: - 'password' : (str) password for unlocking wallet - -Returns: - (bool) true if wallet is unlocked, otherwise false -``` - -## wallet_unused_address - -```text -Return an address containing no balance, will create -a new address if there is none. 
- -Args: - None - -Returns: - (str) Unused wallet address in base58 -``` - diff --git a/docs/search/search_index.json b/docs/search/search_index.json new file mode 100644 index 000000000..62ea94c34 --- /dev/null +++ b/docs/search/search_index.json @@ -0,0 +1,564 @@ +{ + "docs": [ + { + "location": "/", + "text": "LBRY JSON-RPC API Documentation\n\n\nblob_announce\n\n\nAnnounce blobs to the DHT\n\nArgs:\n 'announce_all' (optional) : (bool) announce all the blobs possessed by user\n 'blob_hash' (optional) : (str) announce a blob, specified by blob_hash\n 'stream_hash' (optional) : (str) announce all blobs associated with\n stream_hash\n 'sd_hash' (optional) : (str) announce all blobs associated with\n sd_hash and the sd_hash itself\n\nReturns:\n (bool) true if successful\n\n\n\n\nblob_availability\n\n\nGet blob availability\n\nArgs:\n 'blob_hash' (optional) : (str) check availability for this blob hash\n 'search_timeout' (optional) : (int) how long to search for peers for the blob\n in the dht\n 'blob_timeout' (optional) : (int) how long to try downloading from a peer\n\nReturns:\n (dict) {\n \nis_available\n: \nbool, true if blob is available from a peer from peer list\n\n \nreachable_peers\n: [\nip\n:\nport\n],\n \nunreachable_peers\n: [\nip\n:\nport\n]\n }\n\n\n\n\nblob_delete\n\n\nDelete a blob\n\nArgs:\n 'blob_hash' (optional) : (str) blob hash of the blob to delete\n\nReturns:\n (str) Success/fail message\n\n\n\n\nblob_get\n\n\nDownload and return a blob\n\nArgs:\n 'blob_hash' : (str) blob hash of the blob to get\n 'timeout' (optional) : (int) timeout in number of seconds\n 'encoding' (optional) : (str) by default no attempt at decoding\n is made, can be set to one of the\n following decoders:\n 'json'\n 'payment_rate_manager' (optional) : (str) if not given the default payment rate\n manager will be used.\n supported alternative rate managers:\n 'only-free'\n\nReturns:\n (str) Success/Fail message or (dict) decoded data\n\n\n\n\nblob_list\n\n\nReturns blob hashes. 
If not given filters, returns all blobs known by the blob manager\n\nArgs:\n 'needed' (optional) : (bool) only return needed blobs\n 'finished' (optional) : (bool) only return finished blobs\n 'uri' (optional) : (str) filter blobs by stream in a uri\n 'stream_hash' (optional) : (str) filter blobs by stream hash\n 'sd_hash' (optional) : (str) filter blobs by sd hash\n 'page_size' (optional) : (int) results page size\n 'page' (optional) : (int) page of results to return\n\nReturns:\n (list) List of blob hashes\n\n\n\n\nblob_reflect_all\n\n\nReflects all saved blobs\n\nArgs:\n None\n\nReturns:\n (bool) true if successful\n\n\n\n\nblock_show\n\n\nGet contents of a block\n\nArgs:\n 'blockhash' : (str) hash of the block to look up\n 'height' : (int) height of the block to look up\n\nReturns:\n (dict) Requested block\n\n\n\n\nchannel_export\n\n\nExport serialized channel signing information for a given certificate claim id\n\nArgs:\n 'claim_id' : (str) Claim ID to export information about\n\nReturns:\n (str) Serialized certificate information\n\n\n\n\nchannel_import\n\n\nImport serialized channel signing information (to allow signing new claims to the channel)\n\nArgs:\n 'serialized_certificate_info' (optional) : (str) certificate info\n\nReturns:\n (dict) Result dictionary\n\n\n\n\nchannel_list\n\n\nGet certificate claim infos for channels that can be published to\n\nArgs:\n None\n\nReturns:\n (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim\n is in the wallet.\n\n\n\n\nchannel_new\n\n\nGenerate a publisher key and create a new '@' prefixed certificate claim\n\nArgs:\n 'channel_name' : (str) name of the channel prefixed with '@'\n 'amount' : (float) bid amount on the channel\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }\n\n\n\n\nclaim_abandon\n\n\nAbandon a name and reclaim credits from the claim\n\nArgs:\n 'claim_id' (optional) : (str) claim_id of the claim to abandon\n 'txid' (optional) : (str) txid of the claim to abandon\n 'nout' (optional) : (int) nout of the claim to abandon\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n txid : (str) txid of resulting transaction\n fee : (float) fee paid for the transaction\n }\n\n\n\n\nclaim_list\n\n\nList current claims and information about them for a given name\n\nArgs:\n 'name' : (str) name of the claim to list info about\n\nReturns:\n (dict) State of claims assigned for the name\n {\n 'claims': (list) list of claims for the name\n [\n {\n 'amount': (float) amount assigned to the claim\n 'effective_amount': (float) total amount assigned to the claim,\n including supports\n 'claim_id': (str) claim ID of the claim\n 'height': (int) height of block containing the claim\n 'txid': (str) txid of the claim\n 'nout': (int) nout of the claim\n 'permanent_url': (str) permanent url of the claim,\n 'supports': (list) a list of supports attached to the claim\n 'value': (str) the value of the claim\n },\n ]\n 'supports_without_claims': (list) supports without any claims attached to them\n 'last_takeover_height': (int) the height of last takeover for the name\n }\n\n\n\n\nclaim_list_by_channel\n\n\nGet paginated claims in a channel specified by a channel uri\n\nArgs:\n 'uri' : (str) uri of the channel\n 'uris' (optional) : (list) uris of the channel\n 'page' 
(optional) : (int) which page of results to return where page 1 is the first\n page, defaults to no pages\n 'page_size' (optional) : (int) number of results in a page, default of 10\n\nReturns:\n {\n resolved channel uri: {\n If there was an error:\n 'error': (str) error message\n\n 'claims_in_channel': the total number of results for the channel,\n\n If a page of results was requested:\n 'returned_page': page number returned,\n 'claims_in_channel': [\n {\n 'absolute_channel_position': (int) claim index number in sorted list of\n claims which assert to be part of the\n channel\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}],\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n ],\n }\n }\n\n\n\n\nclaim_list_mine\n\n\nList my name claims\n\nArgs:\n None\n\nReturns:\n (list) List of name claims owned by user\n [\n {\n 'address': (str) address that owns the claim\n 'amount': (float) amount assigned to the claim\n 'blocks_to_expiration': (int) number of blocks until it expires\n 'category': (str) \nclaim\n, \nupdate\n , or \nsupport\n\n 'claim_id': (str) claim ID of the claim\n 'confirmations': (int) number of blocks of confirmations for the claim\n 'expiration_height': (int) the block height which the claim will expire\n 'expired': (bool) true if expired, false otherwise\n 'height': (int) height of the block containing the claim\n 'is_spent': (bool) true if claim is abandoned, false otherwise\n 'name': (str) name of the claim\n 'permanent_url': (str) permanent url of the claim,\n 'txid': (str) txid of the cliam\n 'nout': (int) nout of the claim\n 'value': (str) value of the claim\n },\n ]\n\n\n\n\nclaim_new_support\n\n\nSupport a name claim\n\nArgs:\n 'name' : (str) name of the claim to support\n 'claim_id' : (str) claim_id of the claim to support\n 'amount' : (float) amount of support\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n txid : (str) txid of resulting support claim\n nout : (int) nout of the resulting support claim\n fee : (float) fee paid for the transaction\n }\n\n\n\n\nclaim_renew\n\n\nRenew claim(s) or support(s)\n\nArgs:\n 'outpoint' : (str) outpoint of the claim to renew\n 'height' : (str) update claims expiring before or at this block height\n\nReturns:\n (dict) Dictionary where key is the the original claim's outpoint and\n value is the result of the renewal\n {\n outpoint:{\n\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n },\n }\n\n\n\n\nclaim_send_to_address\n\n\nSend a name claim to an address\n\nArgs:\n 'claim_id' : (str) claim_id to send\n 'address' : (str) address to send the claim to\n 'amount' (optional) : (int) Amount of credits to claim name for, defaults to the current amount\n on the claim\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : 
(str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }\n\n\n\n\nclaim_show\n\n\nResolve claim info from txid/nout or with claim ID\n\nArgs:\n 'txid' (optional) : (str) look for claim with this txid, nout must\n also be specified\n 'nout' (optional) : (int) look for claim with this nout, txid must\n also be specified\n 'claim_id' (optional) : (str) look for claim with this claim id\n\nReturns:\n (dict) Dictionary containing claim info as below,\n\n {\n 'txid': (str) txid of claim\n 'nout': (int) nout of claim\n 'amount': (float) amount of claim\n 'value': (str) value of claim\n 'height' : (int) height of claim takeover\n 'claim_id': (str) claim ID of claim\n 'supports': (list) list of supports associated with claim\n }\n\n if claim cannot be resolved, dictionary as below will be returned\n\n {\n 'error': (str) reason for error\n }\n\n\n\n\ncli_test_command\n\n\nThis command is only for testing the CLI argument parsing\nArgs:\n 'a_arg' (optional) : a arg\n 'b_arg' (optional) : b arg\n 'pos_arg' : pos arg\n 'pos_args' (optional) : pos args\n 'pos_arg2' (optional) : pos arg 2\n 'pos_arg3' (optional) : pos arg 3\n\nReturns:\n pos args\n\n\n\n\ncommands\n\n\nReturn a list of available commands\n\nArgs:\n None\n\nReturns:\n (list) list of available commands\n\n\n\n\ndaemon_stop\n\n\nStop lbrynet-daemon\n\nArgs:\n None\n\nReturns:\n (string) Shutdown message\n\n\n\n\nfile_delete\n\n\nDelete a LBRY file\n\nArgs:\n 'delete_from_download_dir' (optional) : (bool) delete file from download directory,\n instead of just deleting blobs\n 'delete_all' (optional) : (bool) if there are multiple matching files,\n allow the deletion of multiple files.\n Otherwise do not delete anything.\n 'sd_hash' (optional) : (str) delete by file sd hash\n 'file_name' (optional) : (str) delete by file name in downloads folder\n 'stream_hash' (optional) : (str) delete by file stream hash\n 'rowid' (optional) : (int) delete by file row id\n 'claim_id' (optional) : (str) delete by file claim id\n 'txid' (optional) : (str) delete by file claim txid\n 'nout' (optional) : (int) delete by file claim nout\n 'claim_name' (optional) : (str) delete by file claim name\n 'channel_claim_id' (optional) : (str) delete by file channel claim id\n 'channel_name' (optional) : (str) delete by file channel claim name\n\nReturns:\n (bool) true if deletion was successful\n\n\n\n\nfile_list\n\n\nList files limited by optional filters\n\nArgs:\n 'sd_hash' (optional) : (str) get file with matching sd hash\n 'file_name' (optional) : (str) get file with matching file name in the\n downloads folder\n 'stream_hash' (optional) : (str) get file with matching stream hash\n 'rowid' (optional) : (int) get file with matching row id\n 'claim_id' (optional) : (str) get file with matching claim id\n 'outpoint' (optional) : (str) get file with matching claim outpoint\n 'txid' (optional) : (str) get file with matching claim txid\n 'nout' (optional) : (int) get file with matching claim nout\n 'channel_claim_id' (optional) : (str) get file with matching channel claim id\n 'channel_name' (optional) : (str) get file with matching channel name\n 'claim_name' (optional) : (str) get file with matching claim name\n 'full_status' (optional) : (bool) full status, populate the\n 'message' and 'size' fields\n\nReturns:\n (list) List of files\n\n [\n {\n 'completed': (bool) true if download is completed,\n 
'file_name': (str) name of file,\n 'download_directory': (str) download directory,\n 'points_paid': (float) credit paid to download file,\n 'stopped': (bool) true if download is stopped,\n 'stream_hash': (str) stream hash of file,\n 'stream_name': (str) stream name ,\n 'suggested_file_name': (str) suggested file name,\n 'sd_hash': (str) sd hash of file,\n 'download_path': (str) download path of file,\n 'mime_type': (str) mime type of file,\n 'key': (str) key attached to file,\n 'total_bytes': (int) file size in bytes, None if full_status is false,\n 'written_bytes': (int) written size in bytes,\n 'blobs_completed': (int) num_completed, None if full_status is false,\n 'blobs_in_stream': (int) None if full_status is false,\n 'status': (str) downloader status, None if full_status is false,\n 'claim_id': (str) None if full_status is false or if claim is not found,\n 'outpoint': (str) None if full_status is false or if claim is not found,\n 'txid': (str) None if full_status is false or if claim is not found,\n 'nout': (int) None if full_status is false or if claim is not found,\n 'metadata': (dict) None if full_status is false or if claim is not found,\n 'channel_claim_id': (str) None if full_status is false or if claim is not found or signed,\n 'channel_name': (str) None if full_status is false or if claim is not found or signed,\n 'claim_name': (str) None if full_status is false or if claim is not found\n },\n ]\n\n\n\n\nfile_reflect\n\n\nReflect all the blobs in a file matching the filter criteria\n\nArgs:\n 'sd_hash' (optional) : (str) get file with matching sd hash\n 'file_name' (optional) : (str) get file with matching file name in the\n downloads folder\n 'stream_hash' (optional) : (str) get file with matching stream hash\n 'rowid' (optional) : (int) get file with matching row id\n 'reflector' (optional) : (str) reflector server, ip address or url\n by default choose a server from the config\n\nReturns:\n (list) list of blobs reflected\n\n\n\n\nfile_set_status\n\n\nStart or stop downloading a file\n\nArgs:\n 'status' : (str) one of \nstart\n or \nstop\n\n 'sd_hash' (optional) : (str) set status of file with matching sd hash\n 'file_name' (optional) : (str) set status of file with matching file name in the\n downloads folder\n 'stream_hash' (optional) : (str) set status of file with matching stream hash\n 'rowid' (optional) : (int) set status of file with matching row id\n\nReturns:\n (str) Confirmation message\n\n\n\n\nget\n\n\nDownload stream from a LBRY name.\n\nArgs:\n 'uri' (optional) : (str) uri of the content to download\n 'file_name' (optional) : (str) specified name for the downloaded file\n 'timeout' (optional) : (int) download timeout in number of seconds\n\nReturns:\n (dict) Dictionary containing information about the stream\n {\n 'completed': (bool) true if download is completed,\n 'file_name': (str) name of file,\n 'download_directory': (str) download directory,\n 'points_paid': (float) credit paid to download file,\n 'stopped': (bool) true if download is stopped,\n 'stream_hash': (str) stream hash of file,\n 'stream_name': (str) stream name ,\n 'suggested_file_name': (str) suggested file name,\n 'sd_hash': (str) sd hash of file,\n 'download_path': (str) download path of file,\n 'mime_type': (str) mime type of file,\n 'key': (str) key attached to file,\n 'total_bytes': (int) file size in bytes, None if full_status is false,\n 'written_bytes': (int) written size in bytes,\n 'blobs_completed': (int) num_completed, None if full_status is false,\n 'blobs_in_stream': (int) None 
if full_status is false,\n 'status': (str) downloader status, None if full_status is false,\n 'claim_id': (str) claim id,\n 'outpoint': (str) claim outpoint string,\n 'txid': (str) claim txid,\n 'nout': (int) claim nout,\n 'metadata': (dict) claim metadata,\n 'channel_claim_id': (str) None if claim is not signed\n 'channel_name': (str) None if claim is not signed\n 'claim_name': (str) claim name\n }\n\n\n\n\nhelp\n\n\nReturn a useful message for an API command\n\nArgs:\n 'command' (optional) : (str) command to retrieve documentation for\n\nReturns:\n (str) Help message\n\n\n\n\npeer_list\n\n\nGet peers for blob hash\n\nArgs:\n 'blob_hash' : (str) find available peers for this blob hash\n 'timeout' (optional) : (int) peer search timeout in seconds\n\nReturns:\n (list) List of contacts\n\n\n\n\npublish\n\n\nMake a new name claim and publish associated data to lbrynet,\nupdate over existing claim if user already has a claim for name.\n\nFields required in the final Metadata are:\n 'title'\n 'description'\n 'author'\n 'language'\n 'license'\n 'nsfw'\n\nMetadata can be set by either using the metadata argument or by setting individual arguments\nfee, title, description, author, language, license, license_url, thumbnail, preview, nsfw,\nor sources. Individual arguments will overwrite the fields specified in metadata argument.\n\nArgs:\n 'name' : (str) name of the content\n 'bid' : (float) amount to back the claim\n 'metadata' (optional) : (dict) ClaimDict to associate with the claim.\n 'file_path' (optional) : (str) path to file to be associated with name. If provided,\n a lbry stream of this file will be used in 'sources'.\n If no path is given but a sources dict is provided,\n it will be used. If neither are provided, an\n error is raised.\n 'fee' (optional) : (dict) Dictionary representing key fee to download content:\n {\n 'currency': currency_symbol,\n 'amount': float,\n 'address': str, optional\n }\n supported currencies: LBC, USD, BTC\n If an address is not provided a new one will be\n automatically generated. Default fee is zero.\n 'title' (optional) : (str) title of the publication\n 'description' (optional) : (str) description of the publication\n 'author' (optional) : (str) author of the publication\n 'language' (optional) : (str) language of the publication\n 'license' (optional) : (str) publication license\n 'license_url' (optional) : (str) publication license url\n 'thumbnail' (optional) : (str) thumbnail url\n 'preview' (optional) : (str) preview url\n 'nsfw' (optional) : (bool) title of the publication\n 'sources' (optional) : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file\n 'channel_name' (optional) : (str) name of the publisher channel name in the wallet\n 'channel_id' (optional) : (str) claim id of the publisher channel, does not check\n for channel claim being in the wallet. 
This allows\n publishing to a channel where only the certificate\n private key is in the wallet.\n 'claim_address' (optional) : (str) address where the claim is sent to, if not specified\n new address wil automatically be created\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }\n\n\n\n\nreport_bug\n\n\nReport a bug to slack\n\nArgs:\n 'message' : (str) Description of the bug\n\nReturns:\n (bool) true if successful\n\n\n\n\nresolve\n\n\nResolve given LBRY URIs\n\nArgs:\n 'force' (optional) : (bool) force refresh and ignore cache\n 'uri' : (str) uri to resolve\n 'uris' (optional) : (list) uris to resolve\n\nReturns:\n Dictionary of results, keyed by uri\n '\nuri\n': {\n If a resolution error occurs:\n 'error': Error message\n\n If the uri resolves to a channel or a claim in a channel:\n 'certificate': {\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'permanent_url': (str) permanent url of the certificate claim,\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}],\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n\n If the uri resolves to a channel:\n 'claims_in_channel': (int) number of claims in the channel,\n\n If the uri resolves to a claim:\n 'claim': {\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'permanent_url': (str) permanent url of the claim,\n 'channel_name': (str) channel name if claim is in a channel\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}]\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n }\n\n\n\n\nresolve_name\n\n\nResolve stream info from a LBRY name\n\nArgs:\n 'name' : (str) the name to resolve\n 'force' (optional) : (bool) force refresh and do not check cache\n\nReturns:\n (dict) Metadata dictionary from name claim, None if the name is not\n resolvable\n\n\n\n\nrouting_table_get\n\n\nGet DHT routing information\n\nArgs:\n None\n\nReturns:\n (dict) dictionary containing routing and contact information\n {\n \nbuckets\n: {\n \nbucket index\n: [\n {\n \naddress\n: (str) peer address,\n \nnode_id\n: (str) peer node id,\n \nblobs\n: (list) blob hashes announced by peer\n }\n ]\n },\n \ncontacts\n: (list) contact node ids,\n \nblob_hashes\n: (list) all of the blob hashes stored by peers in the list of buckets,\n 
\nnode_id\n: (str) the local dht node id\n }\n\n\n\n\nsettings_get\n\n\nGet daemon settings\n\nArgs:\n None\n\nReturns:\n (dict) Dictionary of daemon settings\n See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings\n\n\n\n\nsettings_set\n\n\nSet daemon settings\n\nArgs:\n 'download_directory' (optional) : (str) path of download directory\n 'data_rate' (optional) : (float) 0.0001\n 'download_timeout' (optional) : (int) 180\n 'peer_port' (optional) : (int) 3333\n 'max_key_fee' (optional) : (dict) maximum key fee for downloads,\n in the format:\n {\n 'currency': \ncurrency_symbol\n,\n 'amount': \namount\n\n }.\n In the CLI, it must be an escaped JSON string\n Supported currency symbols: LBC, USD, BTC\n 'disable_max_key_fee' (optional) : (bool) False\n 'use_upnp' (optional) : (bool) True\n 'run_reflector_server' (optional) : (bool) False\n 'cache_time' (optional) : (int) 150\n 'reflect_uploads' (optional) : (bool) True\n 'share_usage_data' (optional) : (bool) True\n 'peer_search_timeout' (optional) : (int) 3\n 'sd_download_timeout' (optional) : (int) 3\n 'auto_renew_claim_height_delta' (optional) : (int) 0\n claims set to expire within this many blocks will be\n automatically renewed after startup (if set to 0, renews\n will not be made automatically)\n\nReturns:\n (dict) Updated dictionary of daemon settings\n\n\n\n\nstatus\n\n\nGet daemon status\n\nArgs:\n 'session_status' (optional) : (bool) include session status in results\n 'dht_status' (optional) : (bool) include dht network and peer status\n\nReturns:\n (dict) lbrynet-daemon status\n {\n 'lbry_id': lbry peer id, base58,\n 'installation_id': installation id, base58,\n 'is_running': bool,\n 'is_first_run': bool,\n 'startup_status': {\n 'code': status code,\n 'message': status message\n },\n 'connection_status': {\n 'code': connection status code,\n 'message': connection status message\n },\n 'blockchain_status': {\n 'blocks': local blockchain height,\n 'blocks_behind': remote_height - local_height,\n 'best_blockhash': block hash of most recent block,\n },\n 'wallet_is_encrypted': bool,\n\n If given the session status option:\n 'session_status': {\n 'managed_blobs': count of blobs in the blob manager,\n 'managed_streams': count of streams in the file manager\n 'announce_queue_size': number of blobs currently queued to be announced\n 'should_announce_blobs': number of blobs that should be announced\n }\n\n If given the dht status option:\n 'dht_status': {\n 'kbps_received': current kbps receiving,\n 'kbps_sent': current kbps being sent,\n 'total_bytes_sent': total bytes sent,\n 'total_bytes_received': total bytes received,\n 'queries_received': number of queries received per second,\n 'queries_sent': number of queries sent per second,\n 'recent_contacts': count of recently contacted peers,\n 'unique_contacts': count of unique peers\n },\n }\n\n\n\n\nstream_availability\n\n\nGet stream availability for lbry uri\n\nArgs:\n 'uri' : (str) check availability for this uri\n 'search_timeout' (optional) : (int) how long to search for peers for the blob\n in the dht\n 'blob_timeout' (optional) : (int) how long to try downloading from a peer\n\nReturns:\n (dict) {\n 'is_available': \nbool\n,\n 'did_decode': \nbool\n,\n 'did_resolve': \nbool\n,\n 'is_stream': \nbool\n,\n 'num_blobs_in_stream': \nint\n,\n 'sd_hash': \nstr\n,\n 'sd_blob_availability': \ndict\n see `blob_availability`,\n 'head_blob_hash': \nstr\n,\n 'head_blob_availability': \ndict\n see `blob_availability`,\n 'use_upnp': \nbool\n,\n 'upnp_redirect_is_set': \nbool\n,\n 
'error': \nNone\n | \nstr\n error message\n }\n\n\n\n\nstream_cost_estimate\n\n\nGet estimated cost for a lbry stream\n\nArgs:\n 'uri' : (str) uri to use\n 'size' (optional) : (float) stream size in bytes. if provided an sd blob won't be\n downloaded.\n\nReturns:\n (float) Estimated cost in lbry credits, returns None if uri is not\n resolvable\n\n\n\n\ntransaction_list\n\n\nList transactions belonging to wallet\n\nArgs:\n None\n\nReturns:\n (list) List of transactions\n\n {\n \nclaim_info\n: (list) claim info if in txn [{\n \naddress\n: (str) address of claim,\n \nbalance_delta\n: (float) bid amount,\n \namount\n: (float) claim amount,\n \nclaim_id\n: (str) claim id,\n \nclaim_name\n: (str) claim name,\n \nnout\n: (int) nout\n }],\n \nabandon_info\n: (list) abandon info if in txn [{\n \naddress\n: (str) address of abandoned claim,\n \nbalance_delta\n: (float) returned amount,\n \namount\n: (float) claim amount,\n \nclaim_id\n: (str) claim id,\n \nclaim_name\n: (str) claim name,\n \nnout\n: (int) nout\n }],\n \nconfirmations\n: (int) number of confirmations for the txn,\n \ndate\n: (str) date and time of txn,\n \nfee\n: (float) txn fee,\n \nsupport_info\n: (list) support info if in txn [{\n \naddress\n: (str) address of support,\n \nbalance_delta\n: (float) support amount,\n \namount\n: (float) support amount,\n \nclaim_id\n: (str) claim id,\n \nclaim_name\n: (str) claim name,\n \nis_tip\n: (bool),\n \nnout\n: (int) nout\n }],\n \ntimestamp\n: (int) timestamp,\n \ntxid\n: (str) txn id,\n \nupdate_info\n: (list) update info if in txn [{\n \naddress\n: (str) address of claim,\n \nbalance_delta\n: (float) credited/debited\n \namount\n: (float) absolute amount,\n \nclaim_id\n: (str) claim id,\n \nclaim_name\n: (str) claim name,\n \nnout\n: (int) nout\n }],\n \nvalue\n: (float) value of txn\n }\n\n\n\n\ntransaction_show\n\n\nGet a decoded transaction from a txid\n\nArgs:\n 'txid' : (str) txid of the transaction\n\nReturns:\n (dict) JSON formatted transaction\n\n\n\n\nutxo_list\n\n\nList unspent transaction outputs\n\nArgs:\n None\n\nReturns:\n (list) List of unspent transaction outputs (UTXOs)\n [\n {\n \naddress\n: (str) the output address\n \namount\n: (float) unspent amount\n \nheight\n: (int) block height\n \nis_claim\n: (bool) is the tx a claim\n \nis_coinbase\n: (bool) is the tx a coinbase tx\n \nis_support\n: (bool) is the tx a support\n \nis_update\n: (bool) is the tx an update\n \nnout\n: (int) nout of the output\n \ntxid\n: (str) txid of the output\n },\n ...\n ]\n\n\n\n\nversion\n\n\nGet lbry version information\n\nArgs:\n None\n\nReturns:\n (dict) Dictionary of lbry version information\n {\n 'build': (str) build type (e.g. 
\ndev\n, \nrc\n, \nrelease\n),\n 'ip': (str) remote ip, if available,\n 'lbrynet_version': (str) lbrynet_version,\n 'lbryum_version': (str) lbryum_version,\n 'lbryschema_version': (str) lbryschema_version,\n 'os_release': (str) os release string\n 'os_system': (str) os name\n 'platform': (str) platform string\n 'processor': (str) processor type,\n 'python_version': (str) python version,\n }\n\n\n\n\nwallet_balance\n\n\nReturn the balance of the wallet\n\nArgs:\n 'address' (optional) : (str) If provided only the balance for this\n address will be given\n 'include_unconfirmed' (optional) : (bool) Include unconfirmed\n\nReturns:\n (float) amount of lbry credits in wallet\n\n\n\n\nwallet_decrypt\n\n\nDecrypt an encrypted wallet, this will remove the wallet password\n\nArgs:\n None\n\nReturns:\n (bool) true if wallet is decrypted, otherwise false\n\n\n\n\nwallet_encrypt\n\n\nEncrypt a wallet with a password, if the wallet is already encrypted this will update\nthe password\n\nArgs:\n 'new_password' : (str) password string to be used for encrypting wallet\n\nReturns:\n (bool) true if wallet is decrypted, otherwise false\n\n\n\n\nwallet_is_address_mine\n\n\nChecks if an address is associated with the current wallet.\n\nArgs:\n 'address' : (str) address to check\n\nReturns:\n (bool) true, if address is associated with current wallet\n\n\n\n\nwallet_list\n\n\nList wallet addresses\n\nArgs:\n None\n\nReturns:\n List of wallet addresses\n\n\n\n\nwallet_new_address\n\n\nGenerate a new wallet address\n\nArgs:\n None\n\nReturns:\n (str) New wallet address in base58\n\n\n\n\nwallet_prefill_addresses\n\n\nCreate new addresses, each containing `amount` credits\n\nArgs:\n 'no_broadcast' (optional) : (bool) whether to broadcast or not\n 'num_addresses' : (int) num of addresses to create\n 'amount' : (float) initial amount in each address\n\nReturns:\n (dict) the resulting transaction\n\n\n\n\nwallet_public_key\n\n\nGet public key from wallet address\n\nArgs:\n 'address' : (str) address for which to get the public key\n\nReturns:\n (list) list of public keys associated with address.\n Could contain more than one public key if multisig.\n\n\n\n\nwallet_send\n\n\nSend credits. If given an address, send credits to it. If given a claim id, send a tip\nto the owner of a claim specified by uri. 
A tip is a claim support where the recipient\nof the support is the claim address for the claim being supported.\n\nArgs:\n 'amount' : (float) amount of credit to send\n 'address' : (str) address to send credits to\n 'claim_id' : (float) claim_id of the claim to send to tip to\n\nReturns:\n If sending to an address:\n (bool) true if payment successfully scheduled\n\n If sending a claim tip:\n (dict) Dictionary containing the result of the support\n {\n txid : (str) txid of resulting support claim\n nout : (int) nout of the resulting support claim\n fee : (float) fee paid for the transaction\n }\n\n\n\n\nwallet_unlock\n\n\nUnlock an encrypted wallet\n\nArgs:\n 'password' : (str) password for unlocking wallet\n\nReturns:\n (bool) true if wallet is unlocked, otherwise false\n\n\n\n\nwallet_unused_address\n\n\nReturn an address containing no balance, will create\na new address if there is none.\n\nArgs:\n None\n\nReturns:\n (str) Unused wallet address in base58", + "title": "API" + }, + { + "location": "/#lbry-json-rpc-api-documentation", + "text": "", + "title": "LBRY JSON-RPC API Documentation" + }, + { + "location": "/#blob_announce", + "text": "Announce blobs to the DHT\n\nArgs:\n 'announce_all' (optional) : (bool) announce all the blobs possessed by user\n 'blob_hash' (optional) : (str) announce a blob, specified by blob_hash\n 'stream_hash' (optional) : (str) announce all blobs associated with\n stream_hash\n 'sd_hash' (optional) : (str) announce all blobs associated with\n sd_hash and the sd_hash itself\n\nReturns:\n (bool) true if successful", + "title": "blob_announce" + }, + { + "location": "/#blob_availability", + "text": "Get blob availability\n\nArgs:\n 'blob_hash' (optional) : (str) check availability for this blob hash\n 'search_timeout' (optional) : (int) how long to search for peers for the blob\n in the dht\n 'blob_timeout' (optional) : (int) how long to try downloading from a peer\n\nReturns:\n (dict) {\n is_available : bool, true if blob is available from a peer from peer list \n reachable_peers : [ ip : port ],\n unreachable_peers : [ ip : port ]\n }", + "title": "blob_availability" + }, + { + "location": "/#blob_delete", + "text": "Delete a blob\n\nArgs:\n 'blob_hash' (optional) : (str) blob hash of the blob to delete\n\nReturns:\n (str) Success/fail message", + "title": "blob_delete" + }, + { + "location": "/#blob_get", + "text": "Download and return a blob\n\nArgs:\n 'blob_hash' : (str) blob hash of the blob to get\n 'timeout' (optional) : (int) timeout in number of seconds\n 'encoding' (optional) : (str) by default no attempt at decoding\n is made, can be set to one of the\n following decoders:\n 'json'\n 'payment_rate_manager' (optional) : (str) if not given the default payment rate\n manager will be used.\n supported alternative rate managers:\n 'only-free'\n\nReturns:\n (str) Success/Fail message or (dict) decoded data", + "title": "blob_get" + }, + { + "location": "/#blob_list", + "text": "Returns blob hashes. 
If not given filters, returns all blobs known by the blob manager\n\nArgs:\n 'needed' (optional) : (bool) only return needed blobs\n 'finished' (optional) : (bool) only return finished blobs\n 'uri' (optional) : (str) filter blobs by stream in a uri\n 'stream_hash' (optional) : (str) filter blobs by stream hash\n 'sd_hash' (optional) : (str) filter blobs by sd hash\n 'page_size' (optional) : (int) results page size\n 'page' (optional) : (int) page of results to return\n\nReturns:\n (list) List of blob hashes", + "title": "blob_list" + }, + { + "location": "/#blob_reflect_all", + "text": "Reflects all saved blobs\n\nArgs:\n None\n\nReturns:\n (bool) true if successful", + "title": "blob_reflect_all" + }, + { + "location": "/#block_show", + "text": "Get contents of a block\n\nArgs:\n 'blockhash' : (str) hash of the block to look up\n 'height' : (int) height of the block to look up\n\nReturns:\n (dict) Requested block", + "title": "block_show" + }, + { + "location": "/#channel_export", + "text": "Export serialized channel signing information for a given certificate claim id\n\nArgs:\n 'claim_id' : (str) Claim ID to export information about\n\nReturns:\n (str) Serialized certificate information", + "title": "channel_export" + }, + { + "location": "/#channel_import", + "text": "Import serialized channel signing information (to allow signing new claims to the channel)\n\nArgs:\n 'serialized_certificate_info' (optional) : (str) certificate info\n\nReturns:\n (dict) Result dictionary", + "title": "channel_import" + }, + { + "location": "/#channel_list", + "text": "Get certificate claim infos for channels that can be published to\n\nArgs:\n None\n\nReturns:\n (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim\n is in the wallet.", + "title": "channel_list" + }, + { + "location": "/#channel_new", + "text": "Generate a publisher key and create a new '@' prefixed certificate claim\n\nArgs:\n 'channel_name' : (str) name of the channel prefixed with '@'\n 'amount' : (float) bid amount on the channel\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }", + "title": "channel_new" + }, + { + "location": "/#claim_abandon", + "text": "Abandon a name and reclaim credits from the claim\n\nArgs:\n 'claim_id' (optional) : (str) claim_id of the claim to abandon\n 'txid' (optional) : (str) txid of the claim to abandon\n 'nout' (optional) : (int) nout of the claim to abandon\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n txid : (str) txid of resulting transaction\n fee : (float) fee paid for the transaction\n }", + "title": "claim_abandon" + }, + { + "location": "/#claim_list", + "text": "List current claims and information about them for a given name\n\nArgs:\n 'name' : (str) name of the claim to list info about\n\nReturns:\n (dict) State of claims assigned for the name\n {\n 'claims': (list) list of claims for the name\n [\n {\n 'amount': (float) amount assigned to the claim\n 'effective_amount': (float) total amount assigned to the claim,\n including supports\n 'claim_id': (str) claim ID of the claim\n 'height': (int) height of block containing the claim\n 'txid': (str) txid of the claim\n 'nout': (int) nout of the claim\n 'permanent_url': (str) permanent url of the claim,\n 'supports': (list) a list of supports 
attached to the claim\n 'value': (str) the value of the claim\n },\n ]\n 'supports_without_claims': (list) supports without any claims attached to them\n 'last_takeover_height': (int) the height of last takeover for the name\n }", + "title": "claim_list" + }, + { + "location": "/#claim_list_by_channel", + "text": "Get paginated claims in a channel specified by a channel uri\n\nArgs:\n 'uri' : (str) uri of the channel\n 'uris' (optional) : (list) uris of the channel\n 'page' (optional) : (int) which page of results to return where page 1 is the first\n page, defaults to no pages\n 'page_size' (optional) : (int) number of results in a page, default of 10\n\nReturns:\n {\n resolved channel uri: {\n If there was an error:\n 'error': (str) error message\n\n 'claims_in_channel': the total number of results for the channel,\n\n If a page of results was requested:\n 'returned_page': page number returned,\n 'claims_in_channel': [\n {\n 'absolute_channel_position': (int) claim index number in sorted list of\n claims which assert to be part of the\n channel\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}],\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n ],\n }\n }", + "title": "claim_list_by_channel" + }, + { + "location": "/#claim_list_mine", + "text": "List my name claims\n\nArgs:\n None\n\nReturns:\n (list) List of name claims owned by user\n [\n {\n 'address': (str) address that owns the claim\n 'amount': (float) amount assigned to the claim\n 'blocks_to_expiration': (int) number of blocks until it expires\n 'category': (str) claim , update , or support \n 'claim_id': (str) claim ID of the claim\n 'confirmations': (int) number of blocks of confirmations for the claim\n 'expiration_height': (int) the block height which the claim will expire\n 'expired': (bool) true if expired, false otherwise\n 'height': (int) height of the block containing the claim\n 'is_spent': (bool) true if claim is abandoned, false otherwise\n 'name': (str) name of the claim\n 'permanent_url': (str) permanent url of the claim,\n 'txid': (str) txid of the cliam\n 'nout': (int) nout of the claim\n 'value': (str) value of the claim\n },\n ]", + "title": "claim_list_mine" + }, + { + "location": "/#claim_new_support", + "text": "Support a name claim\n\nArgs:\n 'name' : (str) name of the claim to support\n 'claim_id' : (str) claim_id of the claim to support\n 'amount' : (float) amount of support\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n txid : (str) txid of resulting support claim\n nout : (int) nout of the resulting support claim\n fee : (float) fee paid for the transaction\n }", + "title": "claim_new_support" + }, + { + "location": "/#claim_renew", + "text": "Renew claim(s) or support(s)\n\nArgs:\n 'outpoint' : (str) outpoint of the claim to renew\n 'height' : (str) update claims expiring before or at this block height\n\nReturns:\n (dict) Dictionary where key is the the original claim's outpoint and\n 
value is the result of the renewal\n {\n outpoint:{\n\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n },\n }", + "title": "claim_renew" + }, + { + "location": "/#claim_send_to_address", + "text": "Send a name claim to an address\n\nArgs:\n 'claim_id' : (str) claim_id to send\n 'address' : (str) address to send the claim to\n 'amount' (optional) : (int) Amount of credits to claim name for, defaults to the current amount\n on the claim\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }", + "title": "claim_send_to_address" + }, + { + "location": "/#claim_show", + "text": "Resolve claim info from txid/nout or with claim ID\n\nArgs:\n 'txid' (optional) : (str) look for claim with this txid, nout must\n also be specified\n 'nout' (optional) : (int) look for claim with this nout, txid must\n also be specified\n 'claim_id' (optional) : (str) look for claim with this claim id\n\nReturns:\n (dict) Dictionary containing claim info as below,\n\n {\n 'txid': (str) txid of claim\n 'nout': (int) nout of claim\n 'amount': (float) amount of claim\n 'value': (str) value of claim\n 'height' : (int) height of claim takeover\n 'claim_id': (str) claim ID of claim\n 'supports': (list) list of supports associated with claim\n }\n\n if claim cannot be resolved, dictionary as below will be returned\n\n {\n 'error': (str) reason for error\n }", + "title": "claim_show" + }, + { + "location": "/#cli_test_command", + "text": "This command is only for testing the CLI argument parsing\nArgs:\n 'a_arg' (optional) : a arg\n 'b_arg' (optional) : b arg\n 'pos_arg' : pos arg\n 'pos_args' (optional) : pos args\n 'pos_arg2' (optional) : pos arg 2\n 'pos_arg3' (optional) : pos arg 3\n\nReturns:\n pos args", + "title": "cli_test_command" + }, + { + "location": "/#commands", + "text": "Return a list of available commands\n\nArgs:\n None\n\nReturns:\n (list) list of available commands", + "title": "commands" + }, + { + "location": "/#daemon_stop", + "text": "Stop lbrynet-daemon\n\nArgs:\n None\n\nReturns:\n (string) Shutdown message", + "title": "daemon_stop" + }, + { + "location": "/#file_delete", + "text": "Delete a LBRY file\n\nArgs:\n 'delete_from_download_dir' (optional) : (bool) delete file from download directory,\n instead of just deleting blobs\n 'delete_all' (optional) : (bool) if there are multiple matching files,\n allow the deletion of multiple files.\n Otherwise do not delete anything.\n 'sd_hash' (optional) : (str) delete by file sd hash\n 'file_name' (optional) : (str) delete by file name in downloads folder\n 'stream_hash' (optional) : (str) delete by file stream hash\n 'rowid' (optional) : (int) delete by file row id\n 'claim_id' (optional) : (str) delete by file claim id\n 'txid' (optional) : (str) delete by file claim txid\n 'nout' (optional) : (int) delete by file claim nout\n 'claim_name' (optional) : (str) delete by file claim name\n 'channel_claim_id' (optional) : (str) delete by file channel claim id\n 'channel_name' (optional) : (str) delete by file channel claim name\n\nReturns:\n (bool) true if deletion was successful", + "title": "file_delete" + }, + { + "location": 
"/#file_list", + "text": "List files limited by optional filters\n\nArgs:\n 'sd_hash' (optional) : (str) get file with matching sd hash\n 'file_name' (optional) : (str) get file with matching file name in the\n downloads folder\n 'stream_hash' (optional) : (str) get file with matching stream hash\n 'rowid' (optional) : (int) get file with matching row id\n 'claim_id' (optional) : (str) get file with matching claim id\n 'outpoint' (optional) : (str) get file with matching claim outpoint\n 'txid' (optional) : (str) get file with matching claim txid\n 'nout' (optional) : (int) get file with matching claim nout\n 'channel_claim_id' (optional) : (str) get file with matching channel claim id\n 'channel_name' (optional) : (str) get file with matching channel name\n 'claim_name' (optional) : (str) get file with matching claim name\n 'full_status' (optional) : (bool) full status, populate the\n 'message' and 'size' fields\n\nReturns:\n (list) List of files\n\n [\n {\n 'completed': (bool) true if download is completed,\n 'file_name': (str) name of file,\n 'download_directory': (str) download directory,\n 'points_paid': (float) credit paid to download file,\n 'stopped': (bool) true if download is stopped,\n 'stream_hash': (str) stream hash of file,\n 'stream_name': (str) stream name ,\n 'suggested_file_name': (str) suggested file name,\n 'sd_hash': (str) sd hash of file,\n 'download_path': (str) download path of file,\n 'mime_type': (str) mime type of file,\n 'key': (str) key attached to file,\n 'total_bytes': (int) file size in bytes, None if full_status is false,\n 'written_bytes': (int) written size in bytes,\n 'blobs_completed': (int) num_completed, None if full_status is false,\n 'blobs_in_stream': (int) None if full_status is false,\n 'status': (str) downloader status, None if full_status is false,\n 'claim_id': (str) None if full_status is false or if claim is not found,\n 'outpoint': (str) None if full_status is false or if claim is not found,\n 'txid': (str) None if full_status is false or if claim is not found,\n 'nout': (int) None if full_status is false or if claim is not found,\n 'metadata': (dict) None if full_status is false or if claim is not found,\n 'channel_claim_id': (str) None if full_status is false or if claim is not found or signed,\n 'channel_name': (str) None if full_status is false or if claim is not found or signed,\n 'claim_name': (str) None if full_status is false or if claim is not found\n },\n ]", + "title": "file_list" + }, + { + "location": "/#file_reflect", + "text": "Reflect all the blobs in a file matching the filter criteria\n\nArgs:\n 'sd_hash' (optional) : (str) get file with matching sd hash\n 'file_name' (optional) : (str) get file with matching file name in the\n downloads folder\n 'stream_hash' (optional) : (str) get file with matching stream hash\n 'rowid' (optional) : (int) get file with matching row id\n 'reflector' (optional) : (str) reflector server, ip address or url\n by default choose a server from the config\n\nReturns:\n (list) list of blobs reflected", + "title": "file_reflect" + }, + { + "location": "/#file_set_status", + "text": "Start or stop downloading a file\n\nArgs:\n 'status' : (str) one of start or stop \n 'sd_hash' (optional) : (str) set status of file with matching sd hash\n 'file_name' (optional) : (str) set status of file with matching file name in the\n downloads folder\n 'stream_hash' (optional) : (str) set status of file with matching stream hash\n 'rowid' (optional) : (int) set status of file with matching row id\n\nReturns:\n 
(str) Confirmation message", + "title": "file_set_status" + }, + { + "location": "/#get", + "text": "Download stream from a LBRY name.\n\nArgs:\n 'uri' (optional) : (str) uri of the content to download\n 'file_name' (optional) : (str) specified name for the downloaded file\n 'timeout' (optional) : (int) download timeout in number of seconds\n\nReturns:\n (dict) Dictionary containing information about the stream\n {\n 'completed': (bool) true if download is completed,\n 'file_name': (str) name of file,\n 'download_directory': (str) download directory,\n 'points_paid': (float) credit paid to download file,\n 'stopped': (bool) true if download is stopped,\n 'stream_hash': (str) stream hash of file,\n 'stream_name': (str) stream name ,\n 'suggested_file_name': (str) suggested file name,\n 'sd_hash': (str) sd hash of file,\n 'download_path': (str) download path of file,\n 'mime_type': (str) mime type of file,\n 'key': (str) key attached to file,\n 'total_bytes': (int) file size in bytes, None if full_status is false,\n 'written_bytes': (int) written size in bytes,\n 'blobs_completed': (int) num_completed, None if full_status is false,\n 'blobs_in_stream': (int) None if full_status is false,\n 'status': (str) downloader status, None if full_status is false,\n 'claim_id': (str) claim id,\n 'outpoint': (str) claim outpoint string,\n 'txid': (str) claim txid,\n 'nout': (int) claim nout,\n 'metadata': (dict) claim metadata,\n 'channel_claim_id': (str) None if claim is not signed\n 'channel_name': (str) None if claim is not signed\n 'claim_name': (str) claim name\n }", + "title": "get" + }, + { + "location": "/#help", + "text": "Return a useful message for an API command\n\nArgs:\n 'command' (optional) : (str) command to retrieve documentation for\n\nReturns:\n (str) Help message", + "title": "help" + }, + { + "location": "/#peer_list", + "text": "Get peers for blob hash\n\nArgs:\n 'blob_hash' : (str) find available peers for this blob hash\n 'timeout' (optional) : (int) peer search timeout in seconds\n\nReturns:\n (list) List of contacts", + "title": "peer_list" + }, + { + "location": "/#publish", + "text": "Make a new name claim and publish associated data to lbrynet,\nupdate over existing claim if user already has a claim for name.\n\nFields required in the final Metadata are:\n 'title'\n 'description'\n 'author'\n 'language'\n 'license'\n 'nsfw'\n\nMetadata can be set by either using the metadata argument or by setting individual arguments\nfee, title, description, author, language, license, license_url, thumbnail, preview, nsfw,\nor sources. Individual arguments will overwrite the fields specified in metadata argument.\n\nArgs:\n 'name' : (str) name of the content\n 'bid' : (float) amount to back the claim\n 'metadata' (optional) : (dict) ClaimDict to associate with the claim.\n 'file_path' (optional) : (str) path to file to be associated with name. If provided,\n a lbry stream of this file will be used in 'sources'.\n If no path is given but a sources dict is provided,\n it will be used. If neither are provided, an\n error is raised.\n 'fee' (optional) : (dict) Dictionary representing key fee to download content:\n {\n 'currency': currency_symbol,\n 'amount': float,\n 'address': str, optional\n }\n supported currencies: LBC, USD, BTC\n If an address is not provided a new one will be\n automatically generated. 
Default fee is zero.\n 'title' (optional) : (str) title of the publication\n 'description' (optional) : (str) description of the publication\n 'author' (optional) : (str) author of the publication\n 'language' (optional) : (str) language of the publication\n 'license' (optional) : (str) publication license\n 'license_url' (optional) : (str) publication license url\n 'thumbnail' (optional) : (str) thumbnail url\n 'preview' (optional) : (str) preview url\n 'nsfw' (optional) : (bool) title of the publication\n 'sources' (optional) : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file\n 'channel_name' (optional) : (str) name of the publisher channel name in the wallet\n 'channel_id' (optional) : (str) claim id of the publisher channel, does not check\n for channel claim being in the wallet. This allows\n publishing to a channel where only the certificate\n private key is in the wallet.\n 'claim_address' (optional) : (str) address where the claim is sent to, if not specified\n new address wil automatically be created\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }", + "title": "publish" + }, + { + "location": "/#report_bug", + "text": "Report a bug to slack\n\nArgs:\n 'message' : (str) Description of the bug\n\nReturns:\n (bool) true if successful", + "title": "report_bug" + }, + { + "location": "/#resolve", + "text": "Resolve given LBRY URIs\n\nArgs:\n 'force' (optional) : (bool) force refresh and ignore cache\n 'uri' : (str) uri to resolve\n 'uris' (optional) : (list) uris to resolve\n\nReturns:\n Dictionary of results, keyed by uri\n ' uri ': {\n If a resolution error occurs:\n 'error': Error message\n\n If the uri resolves to a channel or a claim in a channel:\n 'certificate': {\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'permanent_url': (str) permanent url of the certificate claim,\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}],\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n\n If the uri resolves to a channel:\n 'claims_in_channel': (int) number of claims in the channel,\n\n If the uri resolves to a claim:\n 'claim': {\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'permanent_url': (str) permanent url of the claim,\n 'channel_name': (str) channel name if claim is in a channel\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}]\n 'txid': (str) 
claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n }", + "title": "resolve" + }, + { + "location": "/#resolve_name", + "text": "Resolve stream info from a LBRY name\n\nArgs:\n 'name' : (str) the name to resolve\n 'force' (optional) : (bool) force refresh and do not check cache\n\nReturns:\n (dict) Metadata dictionary from name claim, None if the name is not\n resolvable", + "title": "resolve_name" + }, + { + "location": "/#routing_table_get", + "text": "Get DHT routing information\n\nArgs:\n None\n\nReturns:\n (dict) dictionary containing routing and contact information\n {\n buckets : {\n bucket index : [\n {\n address : (str) peer address,\n node_id : (str) peer node id,\n blobs : (list) blob hashes announced by peer\n }\n ]\n },\n contacts : (list) contact node ids,\n blob_hashes : (list) all of the blob hashes stored by peers in the list of buckets,\n node_id : (str) the local dht node id\n }", + "title": "routing_table_get" + }, + { + "location": "/#settings_get", + "text": "Get daemon settings\n\nArgs:\n None\n\nReturns:\n (dict) Dictionary of daemon settings\n See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings", + "title": "settings_get" + }, + { + "location": "/#settings_set", + "text": "Set daemon settings\n\nArgs:\n 'download_directory' (optional) : (str) path of download directory\n 'data_rate' (optional) : (float) 0.0001\n 'download_timeout' (optional) : (int) 180\n 'peer_port' (optional) : (int) 3333\n 'max_key_fee' (optional) : (dict) maximum key fee for downloads,\n in the format:\n {\n 'currency': currency_symbol ,\n 'amount': amount \n }.\n In the CLI, it must be an escaped JSON string\n Supported currency symbols: LBC, USD, BTC\n 'disable_max_key_fee' (optional) : (bool) False\n 'use_upnp' (optional) : (bool) True\n 'run_reflector_server' (optional) : (bool) False\n 'cache_time' (optional) : (int) 150\n 'reflect_uploads' (optional) : (bool) True\n 'share_usage_data' (optional) : (bool) True\n 'peer_search_timeout' (optional) : (int) 3\n 'sd_download_timeout' (optional) : (int) 3\n 'auto_renew_claim_height_delta' (optional) : (int) 0\n claims set to expire within this many blocks will be\n automatically renewed after startup (if set to 0, renews\n will not be made automatically)\n\nReturns:\n (dict) Updated dictionary of daemon settings", + "title": "settings_set" + }, + { + "location": "/#status", + "text": "Get daemon status\n\nArgs:\n 'session_status' (optional) : (bool) include session status in results\n 'dht_status' (optional) : (bool) include dht network and peer status\n\nReturns:\n (dict) lbrynet-daemon status\n {\n 'lbry_id': lbry peer id, base58,\n 'installation_id': installation id, base58,\n 'is_running': bool,\n 'is_first_run': bool,\n 'startup_status': {\n 'code': status code,\n 'message': status message\n },\n 'connection_status': {\n 'code': connection status code,\n 'message': connection status message\n },\n 'blockchain_status': {\n 'blocks': local blockchain height,\n 'blocks_behind': remote_height - local_height,\n 'best_blockhash': block hash of most recent block,\n },\n 'wallet_is_encrypted': bool,\n\n If given the session status option:\n 'session_status': {\n 'managed_blobs': count of blobs in the blob manager,\n 'managed_streams': count of streams in the file manager\n 'announce_queue_size': number of blobs currently queued to be announced\n 'should_announce_blobs': number of blobs that should be announced\n }\n\n If given 
the dht status option:\n 'dht_status': {\n 'kbps_received': current kbps receiving,\n 'kbps_sent': current kdps being sent,\n 'total_bytes_sent': total bytes sent,\n 'total_bytes_received': total bytes received,\n 'queries_received': number of queries received per second,\n 'queries_sent': number of queries sent per second,\n 'recent_contacts': count of recently contacted peers,\n 'unique_contacts': count of unique peers\n },\n }", + "title": "status" + }, + { + "location": "/#stream_availability", + "text": "Get stream availability for lbry uri\n\nArgs:\n 'uri' : (str) check availability for this uri\n 'search_timeout' (optional) : (int) how long to search for peers for the blob\n in the dht\n 'search_timeout' (optional) : (int) how long to try downloading from a peer\n\nReturns:\n (dict) {\n 'is_available': bool ,\n 'did_decode': bool ,\n 'did_resolve': bool ,\n 'is_stream': bool ,\n 'num_blobs_in_stream': int ,\n 'sd_hash': str ,\n 'sd_blob_availability': dict see `blob_availability`,\n 'head_blob_hash': str ,\n 'head_blob_availability': dict see `blob_availability`,\n 'use_upnp': bool ,\n 'upnp_redirect_is_set': bool ,\n 'error': None | str error message\n }", + "title": "stream_availability" + }, + { + "location": "/#stream_cost_estimate", + "text": "Get estimated cost for a lbry stream\n\nArgs:\n 'uri' : (str) uri to use\n 'size' (optional) : (float) stream size in bytes. if provided an sd blob won't be\n downloaded.\n\nReturns:\n (float) Estimated cost in lbry credits, returns None if uri is not\n resolvable", + "title": "stream_cost_estimate" + }, + { + "location": "/#transaction_list", + "text": "List transactions belonging to wallet\n\nArgs:\n None\n\nReturns:\n (list) List of transactions\n\n {\n claim_info : (list) claim info if in txn [{\n address : (str) address of claim,\n balance_delta : (float) bid amount,\n amount : (float) claim amount,\n claim_id : (str) claim id,\n claim_name : (str) claim name,\n nout : (int) nout\n }],\n abandon_info : (list) abandon info if in txn [{\n address : (str) address of abandoned claim,\n balance_delta : (float) returned amount,\n amount : (float) claim amount,\n claim_id : (str) claim id,\n claim_name : (str) claim name,\n nout : (int) nout\n }],\n confirmations : (int) number of confirmations for the txn,\n date : (str) date and time of txn,\n fee : (float) txn fee,\n support_info : (list) support info if in txn [{\n address : (str) address of support,\n balance_delta : (float) support amount,\n amount : (float) support amount,\n claim_id : (str) claim id,\n claim_name : (str) claim name,\n is_tip : (bool),\n nout : (int) nout\n }],\n timestamp : (int) timestamp,\n txid : (str) txn id,\n update_info : (list) update info if in txn [{\n address : (str) address of claim,\n balance_delta : (float) credited/debited\n amount : (float) absolute amount,\n claim_id : (str) claim id,\n claim_name : (str) claim name,\n nout : (int) nout\n }],\n value : (float) value of txn\n }", + "title": "transaction_list" + }, + { + "location": "/#transaction_show", + "text": "Get a decoded transaction from a txid\n\nArgs:\n 'txid' : (str) txid of the transaction\n\nReturns:\n (dict) JSON formatted transaction", + "title": "transaction_show" + }, + { + "location": "/#utxo_list", + "text": "List unspent transaction outputs\n\nArgs:\n None\n\nReturns:\n (list) List of unspent transaction outputs (UTXOs)\n [\n {\n address : (str) the output address\n amount : (float) unspent amount\n height : (int) block height\n is_claim : (bool) is the tx a claim\n is_coinbase : 
(bool) is the tx a coinbase tx\n is_support : (bool) is the tx a support\n is_update : (bool) is the tx an update\n nout : (int) nout of the output\n txid : (str) txid of the output\n },\n ...\n ]", + "title": "utxo_list" + }, + { + "location": "/#version", + "text": "Get lbry version information\n\nArgs:\n None\n\nReturns:\n (dict) Dictionary of lbry version information\n {\n 'build': (str) build type (e.g. dev , rc , release ),\n 'ip': (str) remote ip, if available,\n 'lbrynet_version': (str) lbrynet_version,\n 'lbryum_version': (str) lbryum_version,\n 'lbryschema_version': (str) lbryschema_version,\n 'os_release': (str) os release string\n 'os_system': (str) os name\n 'platform': (str) platform string\n 'processor': (str) processor type,\n 'python_version': (str) python version,\n }", + "title": "version" + }, + { + "location": "/#wallet_balance", + "text": "Return the balance of the wallet\n\nArgs:\n 'address' (optional) : (str) If provided only the balance for this\n address will be given\n 'include_unconfirmed' (optional) : (bool) Include unconfirmed\n\nReturns:\n (float) amount of lbry credits in wallet", + "title": "wallet_balance" + }, + { + "location": "/#wallet_decrypt", + "text": "Decrypt an encrypted wallet, this will remove the wallet password\n\nArgs:\n None\n\nReturns:\n (bool) true if wallet is decrypted, otherwise false", + "title": "wallet_decrypt" + }, + { + "location": "/#wallet_encrypt", + "text": "Encrypt a wallet with a password, if the wallet is already encrypted this will update\nthe password\n\nArgs:\n 'new_password' : (str) password string to be used for encrypting wallet\n\nReturns:\n (bool) true if wallet is decrypted, otherwise false", + "title": "wallet_encrypt" + }, + { + "location": "/#wallet_is_address_mine", + "text": "Checks if an address is associated with the current wallet.\n\nArgs:\n 'address' : (str) address to check\n\nReturns:\n (bool) true, if address is associated with current wallet", + "title": "wallet_is_address_mine" + }, + { + "location": "/#wallet_list", + "text": "List wallet addresses\n\nArgs:\n None\n\nReturns:\n List of wallet addresses", + "title": "wallet_list" + }, + { + "location": "/#wallet_new_address", + "text": "Generate a new wallet address\n\nArgs:\n None\n\nReturns:\n (str) New wallet address in base58", + "title": "wallet_new_address" + }, + { + "location": "/#wallet_prefill_addresses", + "text": "Create new addresses, each containing `amount` credits\n\nArgs:\n 'no_broadcast' (optional) : (bool) whether to broadcast or not\n 'num_addresses' : (int) num of addresses to create\n 'amount' : (float) initial amount in each address\n\nReturns:\n (dict) the resulting transaction", + "title": "wallet_prefill_addresses" + }, + { + "location": "/#wallet_public_key", + "text": "Get public key from wallet address\n\nArgs:\n 'address' : (str) address for which to get the public key\n\nReturns:\n (list) list of public keys associated with address.\n Could contain more than one public key if multisig.", + "title": "wallet_public_key" + }, + { + "location": "/#wallet_send", + "text": "Send credits. If given an address, send credits to it. If given a claim id, send a tip\nto the owner of a claim specified by uri. 
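A minimal sketch of calling the wallet and status commands documented above over the daemon's JSON API. The endpoint URL is an assumption (http://localhost:5279 is the usual default port, but the exact URL and path can differ by version and configuration), and the daemon must already be running.

```python
# Sketch only: assumes the lbrynet daemon is running locally and its JSON API
# is reachable at http://localhost:5279 (adjust for your setup).
import json
from urllib.request import Request, urlopen

API_URL = "http://localhost:5279"  # assumed default daemon API endpoint


def daemon_call(method, **params):
    """POST a {"method": ..., "params": ...} request and return the parsed response."""
    payload = json.dumps({"method": method, "params": params}).encode("utf-8")
    request = Request(API_URL, data=payload,
                      headers={"Content-Type": "application/json"})
    with urlopen(request) as response:
        return json.loads(response.read().decode("utf-8"))


if __name__ == "__main__":
    # Response for the `status` call described above, including session details.
    print(daemon_call("status", session_status=True))
    # Response for `wallet_balance` (a float amount of LBC, per the docs above).
    print(daemon_call("wallet_balance"))
```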
A tip is a claim support where the recipient\nof the support is the claim address for the claim being supported.\n\nArgs:\n 'amount' : (float) amount of credit to send\n 'address' : (str) address to send credits to\n 'claim_id' : (float) claim_id of the claim to send to tip to\n\nReturns:\n If sending to an address:\n (bool) true if payment successfully scheduled\n\n If sending a claim tip:\n (dict) Dictionary containing the result of the support\n {\n txid : (str) txid of resulting support claim\n nout : (int) nout of the resulting support claim\n fee : (float) fee paid for the transaction\n }", + "title": "wallet_send" + }, + { + "location": "/#wallet_unlock", + "text": "Unlock an encrypted wallet\n\nArgs:\n 'password' : (str) password for unlocking wallet\n\nReturns:\n (bool) true if wallet is unlocked, otherwise false", + "title": "wallet_unlock" + }, + { + "location": "/#wallet_unused_address", + "text": "Return an address containing no balance, will create\na new address if there is none.\n\nArgs:\n None\n\nReturns:\n (str) Unused wallet address in base58", + "title": "wallet_unused_address" + }, + { + "location": "/cli/", + "text": "LBRY Command Line Documentation\n\n\nblob_announce\n\n\nAnnounce blobs to the DHT\n\nUsage:\n blob_announce [--announce_all] [\nblob_hash\n | --blob_hash=\nblob_hash\n]\n [\nstream_hash\n | --stream_hash=\nstream_hash\n]\n [\nsd_hash\n | --sd_hash=\nsd_hash\n]\n\n\nOptions:\n --announce_all=\nannounce_all\n : (bool) announce all the blobs possessed by user\n --blob_hash=\nblob_hash\n : (str) announce a blob, specified by blob_hash\n --stream_hash=\nstream_hash\n : (str) announce all blobs associated with\n stream_hash\n --sd_hash=\nsd_hash\n : (str) announce all blobs associated with\n sd_hash and the sd_hash itself\n\nReturns:\n (bool) true if successful\n\n\n\n\nblob_availability\n\n\nGet blob availability\n\nUsage:\n blob_availability (\nblob_hash\n) [\nsearch_timeout\n | --search_timeout=\nsearch_timeout\n]\n [\nblob_timeout\n | --blob_timeout=\nblob_timeout\n]\n\n\nOptions:\n --blob_hash=\nblob_hash\n : (str) check availability for this blob hash\n --search_timeout=\nsearch_timeout\n : (int) how long to search for peers for the blob\n in the dht\n --blob_timeout=\nblob_timeout\n : (int) how long to try downloading from a peer\n\nReturns:\n (dict) {\n \nis_available\n: \nbool, true if blob is available from a peer from peer list\n\n \nreachable_peers\n: [\nip\n:\nport\n],\n \nunreachable_peers\n: [\nip\n:\nport\n]\n }\n\n\n\n\nblob_delete\n\n\nDelete a blob\n\nUsage:\n blob_delete (\nblob_hash\n | --blob_hash=\nblob_hash)\n\n\nOptions:\n --blob_hash=\nblob_hash\n : (str) blob hash of the blob to delete\n\nReturns:\n (str) Success/fail message\n\n\n\n\nblob_get\n\n\nDownload and return a blob\n\nUsage:\n blob_get (\nblob_hash\n | --blob_hash=\nblob_hash\n) [--timeout=\ntimeout\n]\n [--encoding=\nencoding\n] [--payment_rate_manager=\npayment_rate_manager\n]\n\n\nOptions:\n --blob_hash=\nblob_hash\n : (str) blob hash of the blob to get\n --timeout=\ntimeout\n : (int) timeout in number of seconds\n --encoding=\nencoding\n : (str) by default no attempt at decoding\n is made, can be set to one of the\n following decoders:\n 'json'\n --payment_rate_manager=\npayment_rate_manager\n : (str) if not given the default payment rate\n manager will be used.\n supported alternative rate managers:\n 'only-free'\n\nReturns:\n (str) Success/Fail message or (dict) decoded data\n\n\n\n\nblob_list\n\n\nReturns blob hashes. 
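The blob commands shown above now accept only the double-dashed flag form (the single-dashed short arguments were removed in this change). A small sketch of driving them from Python, assuming the `lbrynet-cli` executable that ships with the daemon is on PATH and the daemon is running; the blob hash is a placeholder.

```python
# Sketch of invoking the blob commands above through the CLI, using only
# double-dashed flags. Assumes `lbrynet-cli` is on PATH and the daemon is running.
import subprocess


def cli(*args):
    """Run a lbrynet CLI command and return its stdout as text."""
    return subprocess.check_output(("lbrynet-cli",) + args).decode("utf-8")


# Announce every blob we have, then check availability of one specific blob.
print(cli("blob_announce", "--announce_all"))

blob_hash = "0" * 96  # placeholder value, not a real blob hash
print(cli("blob_availability", "--blob_hash=" + blob_hash, "--search_timeout=30"))
```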
If not given filters, returns all blobs known by the blob manager\n\nUsage:\n blob_list [--needed] [--finished] [\nuri\n | --uri=\nuri\n]\n [\nstream_hash\n | --stream_hash=\nstream_hash\n]\n [\nsd_hash\n | --sd_hash=\nsd_hash\n]\n [\npage_size\n | --page_size=\npage_size\n]\n [\npage\n | --page=\npage\n]\n\n\nOptions:\n --needed : (bool) only return needed blobs\n --finished : (bool) only return finished blobs\n --uri=\nuri\n : (str) filter blobs by stream in a uri\n --stream_hash=\nstream_hash\n : (str) filter blobs by stream hash\n --sd_hash=\nsd_hash\n : (str) filter blobs by sd hash\n --page_size=\npage_size\n : (int) results page size\n --page=\npage\n : (int) page of results to return\n\nReturns:\n (list) List of blob hashes\n\n\n\n\nblob_reflect_all\n\n\nReflects all saved blobs\n\nUsage:\n blob_reflect_all\n\n\nOptions:\n None\n\nReturns:\n (bool) true if successful\n\n\n\n\nblock_show\n\n\nGet contents of a block\n\nUsage:\n block_show (\nblockhash\n | --blockhash=\nblockhash\n) | (\nheight\n | --height=\nheight\n)\n\n\nOptions:\n --blockhash=\nblockhash\n : (str) hash of the block to look up\n --height=\nheight\n : (int) height of the block to look up\n\nReturns:\n (dict) Requested block\n\n\n\n\nchannel_export\n\n\nExport serialized channel signing information for a given certificate claim id\n\nUsage:\n channel_export (\nclaim_id\n | --claim_id=\nclaim_id\n)\n\n\nOptions:\n --claim_id=\nclaim_id\n : (str) Claim ID to export information about\n\nReturns:\n (str) Serialized certificate information\n\n\n\n\nchannel_import\n\n\nImport serialized channel signing information (to allow signing new claims to the channel)\n\nUsage:\n channel_import (\nserialized_certificate_info\n |\n --serialized_certificate_info=\nserialized_certificate_info\n)\n\n\nOptions:\n --serialized_certificate_info=\nserialized_certificate_info\n : (str) certificate info\n\nReturns:\n (dict) Result dictionary\n\n\n\n\nchannel_list\n\n\nGet certificate claim infos for channels that can be published to\n\nUsage:\n channel_list\n\n\nOptions:\n None\n\nReturns:\n (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim\n is in the wallet.\n\n\n\n\nchannel_new\n\n\nGenerate a publisher key and create a new '@' prefixed certificate claim\n\nUsage:\n channel_new (\nchannel_name\n | --channel_name=\nchannel_name\n)\n (\namount\n | --amount=\namount\n)\n\n\nOptions:\n --channel_name=\nchannel_name\n : (str) name of the channel prefixed with '@'\n --amount=\namount\n : (float) bid amount on the channel\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }\n\n\n\n\nclaim_abandon\n\n\nAbandon a name and reclaim credits from the claim\n\nUsage:\n claim_abandon [\nclaim_id\n | --claim_id=\nclaim_id\n]\n [\ntxid\n | --txid=\ntxid\n] [\nnout\n | --nout=\nnout\n]\n\n\nOptions:\n --claim_id=\nclaim_id\n : (str) claim_id of the claim to abandon\n --txid=\ntxid\n : (str) txid of the claim to abandon\n --nout=\nnout\n : (int) nout of the claim to abandon\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n txid : (str) txid of resulting transaction\n fee : (float) fee paid for the transaction\n }\n\n\n\n\nclaim_list\n\n\nList current claims and information about them for a given name\n\nUsage:\n claim_list (\nname\n | 
--name=\nname\n)\n\n\nOptions:\n --name=\nname\n : (str) name of the claim to list info about\n\nReturns:\n (dict) State of claims assigned for the name\n {\n 'claims': (list) list of claims for the name\n [\n {\n 'amount': (float) amount assigned to the claim\n 'effective_amount': (float) total amount assigned to the claim,\n including supports\n 'claim_id': (str) claim ID of the claim\n 'height': (int) height of block containing the claim\n 'txid': (str) txid of the claim\n 'nout': (int) nout of the claim\n 'permanent_url': (str) permanent url of the claim,\n 'supports': (list) a list of supports attached to the claim\n 'value': (str) the value of the claim\n },\n ]\n 'supports_without_claims': (list) supports without any claims attached to them\n 'last_takeover_height': (int) the height of last takeover for the name\n }\n\n\n\n\nclaim_list_by_channel\n\n\nGet paginated claims in a channel specified by a channel uri\n\nUsage:\n claim_list_by_channel (\nuri\n | --uri=\nuri\n) [\nuris\n...] [--page=\npage\n]\n [--page_size=\npage_size\n]\n\n\nOptions:\n --uri=\nuri\n : (str) uri of the channel\n --uris=\nuris\n : (list) uris of the channel\n --page=\npage\n : (int) which page of results to return where page 1 is the first\n page, defaults to no pages\n --page_size=\npage_size\n : (int) number of results in a page, default of 10\n\nReturns:\n {\n resolved channel uri: {\n If there was an error:\n 'error': (str) error message\n\n 'claims_in_channel': the total number of results for the channel,\n\n If a page of results was requested:\n 'returned_page': page number returned,\n 'claims_in_channel': [\n {\n 'absolute_channel_position': (int) claim index number in sorted list of\n claims which assert to be part of the\n channel\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}],\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n ],\n }\n }\n\n\n\n\nclaim_list_mine\n\n\nList my name claims\n\nUsage:\n claim_list_mine\n\n\nOptions:\n None\n\nReturns:\n (list) List of name claims owned by user\n [\n {\n 'address': (str) address that owns the claim\n 'amount': (float) amount assigned to the claim\n 'blocks_to_expiration': (int) number of blocks until it expires\n 'category': (str) \nclaim\n, \nupdate\n , or \nsupport\n\n 'claim_id': (str) claim ID of the claim\n 'confirmations': (int) number of blocks of confirmations for the claim\n 'expiration_height': (int) the block height which the claim will expire\n 'expired': (bool) true if expired, false otherwise\n 'height': (int) height of the block containing the claim\n 'is_spent': (bool) true if claim is abandoned, false otherwise\n 'name': (str) name of the claim\n 'permanent_url': (str) permanent url of the claim,\n 'txid': (str) txid of the cliam\n 'nout': (int) nout of the claim\n 'value': (str) value of the claim\n },\n ]\n\n\n\n\nclaim_new_support\n\n\nSupport a name claim\n\nUsage:\n claim_new_support (\nname\n | --name=\nname\n) (\nclaim_id\n | 
--claim_id=\nclaim_id\n)\n (\namount\n | --amount=\namount\n)\n\n\nOptions:\n --name=\nname\n : (str) name of the claim to support\n --claim_id=\nclaim_id\n : (str) claim_id of the claim to support\n --amount=\namount\n : (float) amount of support\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n txid : (str) txid of resulting support claim\n nout : (int) nout of the resulting support claim\n fee : (float) fee paid for the transaction\n }\n\n\n\n\nclaim_renew\n\n\nRenew claim(s) or support(s)\n\nUsage:\n claim_renew (\noutpoint\n | --outpoint=\noutpoint\n) | (\nheight\n | --height=\nheight\n)\n\n\nOptions:\n --outpoint=\noutpoint\n : (str) outpoint of the claim to renew\n --height=\nheight\n : (str) update claims expiring before or at this block height\n\nReturns:\n (dict) Dictionary where key is the the original claim's outpoint and\n value is the result of the renewal\n {\n outpoint:{\n\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n },\n }\n\n\n\n\nclaim_send_to_address\n\n\nSend a name claim to an address\n\nUsage:\n claim_send_to_address (\nclaim_id\n | --claim_id=\nclaim_id\n)\n (\naddress\n | --address=\naddress\n)\n [\namount\n | --amount=\namount\n]\n\n\nOptions:\n --claim_id=\nclaim_id\n : (str) claim_id to send\n --address=\naddress\n : (str) address to send the claim to\n --amount\namount\n : (int) Amount of credits to claim name for, defaults to the current amount\n on the claim\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }\n\n\n\n\nclaim_show\n\n\nResolve claim info from txid/nout or with claim ID\n\nUsage:\n claim_show [\ntxid\n | --txid=\ntxid\n] [\nnout\n | --nout=\nnout\n]\n [\nclaim_id\n | --claim_id=\nclaim_id\n]\n\n\nOptions:\n --txid=\ntxid\n : (str) look for claim with this txid, nout must\n also be specified\n --nout=\nnout\n : (int) look for claim with this nout, txid must\n also be specified\n --claim_id=\nclaim_id\n : (str) look for claim with this claim id\n\nReturns:\n (dict) Dictionary containing claim info as below,\n\n {\n 'txid': (str) txid of claim\n 'nout': (int) nout of claim\n 'amount': (float) amount of claim\n 'value': (str) value of claim\n 'height' : (int) height of claim takeover\n 'claim_id': (str) claim ID of claim\n 'supports': (list) list of supports associated with claim\n }\n\n if claim cannot be resolved, dictionary as below will be returned\n\n {\n 'error': (str) reason for error\n }\n\n\n\n\ncli_test_command\n\n\nThis command is only for testing the CLI argument parsing\nUsage:\n cli_test_command [--a_arg] [--b_arg] (\npos_arg\n | --pos_arg=\npos_arg\n)\n [\npos_args\n...] 
[--pos_arg2=\npos_arg2\n]\n [--pos_arg3=\npos_arg3\n]\n\n\nOptions:\n --a_arg : a arg\n --b_arg : b arg\n --pos_arg=\npos_arg\n : pos arg\n --pos_args=\npos_args\n : pos args\n --pos_arg2=\npos_arg2\n : pos arg 2\n --pos_arg3=\npos_arg3\n : pos arg 3\n\nReturns:\n pos args\n\n\n\n\ncommands\n\n\nReturn a list of available commands\n\nUsage:\n commands\n\n\nOptions:\n None\n\nReturns:\n (list) list of available commands\n\n\n\n\ndaemon_stop\n\n\nStop lbrynet-daemon\n\nUsage:\n daemon_stop\n\n\nOptions:\n None\n\nReturns:\n (string) Shutdown message\n\n\n\n\nfile_delete\n\n\nDelete a LBRY file\n\nUsage:\n file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=\nsd_hash\n] [--file_name=\nfile_name\n]\n [--stream_hash=\nstream_hash\n] [--rowid=\nrowid\n] [--claim_id=\nclaim_id\n] [--txid=\ntxid\n]\n [--nout=\nnout\n] [--claim_name=\nclaim_name\n] [--channel_claim_id=\nchannel_claim_id\n]\n [--channel_name=\nchannel_name\n]\n\n\nOptions:\n --delete_from_download_dir : (bool) delete file from download directory,\n instead of just deleting blobs\n --delete_all : (bool) if there are multiple matching files,\n allow the deletion of multiple files.\n Otherwise do not delete anything.\n --sd_hash=\nsd_hash\n : (str) delete by file sd hash\n --file_name\nfile_name\n : (str) delete by file name in downloads folder\n --stream_hash=\nstream_hash\n : (str) delete by file stream hash\n --rowid=\nrowid\n : (int) delete by file row id\n --claim_id=\nclaim_id\n : (str) delete by file claim id\n --txid=\ntxid\n : (str) delete by file claim txid\n --nout=\nnout\n : (int) delete by file claim nout\n --claim_name=\nclaim_name\n : (str) delete by file claim name\n --channel_claim_id=\nchannel_claim_id\n : (str) delete by file channel claim id\n --channel_name=\nchannel_name\n : (str) delete by file channel claim name\n\nReturns:\n (bool) true if deletion was successful\n\n\n\n\nfile_list\n\n\nList files limited by optional filters\n\nUsage:\n file_list [--sd_hash=\nsd_hash\n] [--file_name=\nfile_name\n] [--stream_hash=\nstream_hash\n]\n [--rowid=\nrowid\n] [--claim_id=\nclaim_id\n] [--outpoint=\noutpoint\n] [--txid=\ntxid\n] [--nout=\nnout\n]\n [--channel_claim_id=\nchannel_claim_id\n] [--channel_name=\nchannel_name\n]\n [--claim_name=\nclaim_name\n] [--full_status]\n\n\nOptions:\n --sd_hash=\nsd_hash\n : (str) get file with matching sd hash\n --file_name=\nfile_name\n : (str) get file with matching file name in the\n downloads folder\n --stream_hash=\nstream_hash\n : (str) get file with matching stream hash\n --rowid=\nrowid\n : (int) get file with matching row id\n --claim_id=\nclaim_id\n : (str) get file with matching claim id\n --outpoint=\noutpoint\n : (str) get file with matching claim outpoint\n --txid=\ntxid\n : (str) get file with matching claim txid\n --nout=\nnout\n : (int) get file with matching claim nout\n --channel_claim_id=\nchannel_claim_id\n : (str) get file with matching channel claim id\n --channel_name=\nchannel_name\n : (str) get file with matching channel name\n --claim_name=\nclaim_name\n : (str) get file with matching claim name\n --full_status : (bool) full status, populate the\n 'message' and 'size' fields\n\nReturns:\n (list) List of files\n\n [\n {\n 'completed': (bool) true if download is completed,\n 'file_name': (str) name of file,\n 'download_directory': (str) download directory,\n 'points_paid': (float) credit paid to download file,\n 'stopped': (bool) true if download is stopped,\n 'stream_hash': (str) stream hash of file,\n 'stream_name': (str) stream name ,\n 
'suggested_file_name': (str) suggested file name,\n 'sd_hash': (str) sd hash of file,\n 'download_path': (str) download path of file,\n 'mime_type': (str) mime type of file,\n 'key': (str) key attached to file,\n 'total_bytes': (int) file size in bytes, None if full_status is false,\n 'written_bytes': (int) written size in bytes,\n 'blobs_completed': (int) num_completed, None if full_status is false,\n 'blobs_in_stream': (int) None if full_status is false,\n 'status': (str) downloader status, None if full_status is false,\n 'claim_id': (str) None if full_status is false or if claim is not found,\n 'outpoint': (str) None if full_status is false or if claim is not found,\n 'txid': (str) None if full_status is false or if claim is not found,\n 'nout': (int) None if full_status is false or if claim is not found,\n 'metadata': (dict) None if full_status is false or if claim is not found,\n 'channel_claim_id': (str) None if full_status is false or if claim is not found or signed,\n 'channel_name': (str) None if full_status is false or if claim is not found or signed,\n 'claim_name': (str) None if full_status is false or if claim is not found\n },\n ]\n\n\n\n\nfile_reflect\n\n\nReflect all the blobs in a file matching the filter criteria\n\nUsage:\n file_reflect [--sd_hash=\nsd_hash\n] [--file_name=\nfile_name\n]\n [--stream_hash=\nstream_hash\n] [--rowid=\nrowid\n]\n [--reflector=\nreflector\n]\n\n\nOptions:\n --sd_hash=\nsd_hash\n : (str) get file with matching sd hash\n --file_name=\nfile_name\n : (str) get file with matching file name in the\n downloads folder\n --stream_hash=\nstream_hash\n : (str) get file with matching stream hash\n --rowid=\nrowid\n : (int) get file with matching row id\n --reflector=\nreflector\n : (str) reflector server, ip address or url\n by default choose a server from the config\n\nReturns:\n (list) list of blobs reflected\n\n\n\n\nfile_set_status\n\n\nStart or stop downloading a file\n\nUsage:\n file_set_status (\nstatus\n | --status=\nstatus\n) [--sd_hash=\nsd_hash\n]\n [--file_name=\nfile_name\n] [--stream_hash=\nstream_hash\n] [--rowid=\nrowid\n]\n\n\nOptions:\n --status=\nstatus\n : (str) one of \nstart\n or \nstop\n\n --sd_hash=\nsd_hash\n : (str) set status of file with matching sd hash\n --file_name=\nfile_name\n : (str) set status of file with matching file name in the\n downloads folder\n --stream_hash=\nstream_hash\n : (str) set status of file with matching stream hash\n --rowid=\nrowid\n : (int) set status of file with matching row id\n\nReturns:\n (str) Confirmation message\n\n\n\n\nget\n\n\nDownload stream from a LBRY name.\n\nUsage:\n get \nuri\n [\nfile_name\n | --file_name=\nfile_name\n] [\ntimeout\n | --timeout=\ntimeout\n]\n\n\n\nOptions:\n --uri=\nuri\n : (str) uri of the content to download\n --file_name=\nfile_name\n : (str) specified name for the downloaded file\n --timeout=\ntimeout\n : (int) download timeout in number of seconds\n\nReturns:\n (dict) Dictionary containing information about the stream\n {\n 'completed': (bool) true if download is completed,\n 'file_name': (str) name of file,\n 'download_directory': (str) download directory,\n 'points_paid': (float) credit paid to download file,\n 'stopped': (bool) true if download is stopped,\n 'stream_hash': (str) stream hash of file,\n 'stream_name': (str) stream name ,\n 'suggested_file_name': (str) suggested file name,\n 'sd_hash': (str) sd hash of file,\n 'download_path': (str) download path of file,\n 'mime_type': (str) mime type of file,\n 'key': (str) key attached to file,\n 
'total_bytes': (int) file size in bytes, None if full_status is false,\n 'written_bytes': (int) written size in bytes,\n 'blobs_completed': (int) num_completed, None if full_status is false,\n 'blobs_in_stream': (int) None if full_status is false,\n 'status': (str) downloader status, None if full_status is false,\n 'claim_id': (str) claim id,\n 'outpoint': (str) claim outpoint string,\n 'txid': (str) claim txid,\n 'nout': (int) claim nout,\n 'metadata': (dict) claim metadata,\n 'channel_claim_id': (str) None if claim is not signed\n 'channel_name': (str) None if claim is not signed\n 'claim_name': (str) claim name\n }\n\n\n\n\nhelp\n\n\nReturn a useful message for an API command\n\nUsage:\n help [\ncommand\n | --command=\ncommand\n]\n\n\nOptions:\n --command=\ncommand\n : (str) command to retrieve documentation for\n\nReturns:\n (str) Help message\n\n\n\n\npeer_list\n\n\nGet peers for blob hash\n\nUsage:\n peer_list (\nblob_hash\n | --blob_hash=\nblob_hash\n) [\ntimeout\n | --timeout=\ntimeout\n]\n\n\nOptions:\n --blob_hash=\nblob_hash\n : (str) find available peers for this blob hash\n --timeout=\ntimeout\n : (int) peer search timeout in seconds\n\nReturns:\n (list) List of contacts\n\n\n\n\npublish\n\n\nMake a new name claim and publish associated data to lbrynet,\nupdate over existing claim if user already has a claim for name.\n\nFields required in the final Metadata are:\n 'title'\n 'description'\n 'author'\n 'language'\n 'license'\n 'nsfw'\n\nMetadata can be set by either using the metadata argument or by setting individual arguments\nfee, title, description, author, language, license, license_url, thumbnail, preview, nsfw,\nor sources. Individual arguments will overwrite the fields specified in metadata argument.\n\nUsage:\n publish (\nname\n | --name=\nname\n) (\nbid\n | --bid=\nbid\n) [--metadata=\nmetadata\n]\n [--file_path=\nfile_path\n] [--fee=\nfee\n] [--title=\ntitle\n]\n [--description=\ndescription\n] [--author=\nauthor\n] [--language=\nlanguage\n]\n [--license=\nlicense\n] [--license_url=\nlicense_url\n] [--thumbnail=\nthumbnail\n]\n [--preview=\npreview\n] [--nsfw=\nnsfw\n] [--sources=\nsources\n]\n [--channel_name=\nchannel_name\n] [--channel_id=\nchannel_id\n]\n [--claim_address=\nclaim_address\n] [--change_address=\nchange_address\n]\n\n\nOptions:\n --name=\nname\n : (str) name of the content\n --bid=\nbid\n : (float) amount to back the claim\n --metadata=\nmetadata\n : (dict) ClaimDict to associate with the claim.\n --file_path=\nfile_path\n : (str) path to file to be associated with name. If provided,\n a lbry stream of this file will be used in 'sources'.\n If no path is given but a sources dict is provided,\n it will be used. If neither are provided, an\n error is raised.\n --fee=\nfee\n : (dict) Dictionary representing key fee to download content:\n {\n 'currency': currency_symbol,\n 'amount': float,\n 'address': str, optional\n }\n supported currencies: LBC, USD, BTC\n If an address is not provided a new one will be\n automatically generated. 
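The `fee` option of `publish` described above is a dictionary; the settings documentation notes that such dict values must be passed as escaped JSON strings on the CLI, whereas over the daemon's JSON API they can presumably be sent as plain JSON objects. A sketch of a `publish` call with a key fee under that assumption; the endpoint URL, file path, name, and channel values are placeholders.

```python
# Sketch of a `publish` request with the required metadata fields and an
# optional key fee, sent to the daemon API as plain JSON. All concrete values
# below (URL, name, file path, fee amount) are placeholder assumptions.
import json
from urllib.request import Request, urlopen

params = {
    "name": "my-claim-name",                    # placeholder claim name
    "bid": 1.0,
    "file_path": "/path/to/file.mp4",           # placeholder file path
    "title": "Example title",
    "description": "Example description",
    "author": "example",
    "language": "en",
    "license": "Public Domain",
    "nsfw": False,
    "fee": {"currency": "LBC", "amount": 0.5},  # optional key fee, per the docs above
}
payload = json.dumps({"method": "publish", "params": params}).encode("utf-8")
request = Request("http://localhost:5279", data=payload,
                  headers={"Content-Type": "application/json"})
print(json.loads(urlopen(request).read().decode("utf-8")))
```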
Default fee is zero.\n --title=\ntitle\n : (str) title of the publication\n --description=\ndescription\n : (str) description of the publication\n --author=\nauthor\n : (str) author of the publication\n --language=\nlanguage\n : (str) language of the publication\n --license=\nlicense\n : (str) publication license\n --license_url=\nlicense_url\n : (str) publication license url\n --thumbnail=\nthumbnail\n : (str) thumbnail url\n --preview=\npreview\n : (str) preview url\n --nsfw=\nnsfw\n : (bool) title of the publication\n --sources=\nsources\n : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file\n --channel_name=\nchannel_name\n : (str) name of the publisher channel name in the wallet\n --channel_id=\nchannel_id\n : (str) claim id of the publisher channel, does not check\n for channel claim being in the wallet. This allows\n publishing to a channel where only the certificate\n private key is in the wallet.\n --claim_address=\nclaim_address\n : (str) address where the claim is sent to, if not specified\n new address wil automatically be created\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }\n\n\n\n\nreport_bug\n\n\nReport a bug to slack\n\nUsage:\n report_bug (\nmessage\n | --message=\nmessage\n)\n\n\nOptions:\n --message=\nmessage\n : (str) Description of the bug\n\nReturns:\n (bool) true if successful\n\n\n\n\nresolve\n\n\nResolve given LBRY URIs\n\nUsage:\n resolve [--force] (\nuri\n | --uri=\nuri\n) [\nuris\n...]\n\n\nOptions:\n --force : (bool) force refresh and ignore cache\n --uri=\nuri\n : (str) uri to resolve\n --uris=\nuris\n : (list) uris to resolve\n\nReturns:\n Dictionary of results, keyed by uri\n '\nuri\n': {\n If a resolution error occurs:\n 'error': Error message\n\n If the uri resolves to a channel or a claim in a channel:\n 'certificate': {\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'permanent_url': (str) permanent url of the certificate claim,\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}],\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n\n If the uri resolves to a channel:\n 'claims_in_channel': (int) number of claims in the channel,\n\n If the uri resolves to a claim:\n 'claim': {\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'permanent_url': (str) permanent url of the claim,\n 'channel_name': (str) channel name if claim is in a channel\n 'supports: (list) list of supports [{'txid': 
(str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}]\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n }\n\n\n\n\nresolve_name\n\n\nResolve stream info from a LBRY name\n\nUsage:\n resolve_name (\nname\n | --name=\nname\n) [--force]\n\n\nOptions:\n --name=\nname\n : (str) the name to resolve\n --force : (bool) force refresh and do not check cache\n\nReturns:\n (dict) Metadata dictionary from name claim, None if the name is not\n resolvable\n\n\n\n\nrouting_table_get\n\n\nGet DHT routing information\n\nUsage:\n routing_table_get\n\n\nOptions:\n None\n\nReturns:\n (dict) dictionary containing routing and contact information\n {\n \nbuckets\n: {\n \nbucket index\n: [\n {\n \naddress\n: (str) peer address,\n \nnode_id\n: (str) peer node id,\n \nblobs\n: (list) blob hashes announced by peer\n }\n ]\n },\n \ncontacts\n: (list) contact node ids,\n \nblob_hashes\n: (list) all of the blob hashes stored by peers in the list of buckets,\n \nnode_id\n: (str) the local dht node id\n }\n\n\n\n\nsettings_get\n\n\nGet daemon settings\n\nUsage:\n settings_get\n\n\nOptions:\n None\n\nReturns:\n (dict) Dictionary of daemon settings\n See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings\n\n\n\n\nsettings_set\n\n\nSet daemon settings\n\nUsage:\n settings_set [--download_directory=\ndownload_directory\n]\n [--data_rate=\ndata_rate\n]\n [--download_timeout=\ndownload_timeout\n]\n [--peer_port=\npeer_port\n]\n [--max_key_fee=\nmax_key_fee\n]\n [--disable_max_key_fee=\ndisable_max_key_fee\n]\n [--use_upnp=\nuse_upnp\n]\n [--run_reflector_server=\nrun_reflector_server\n]\n [--cache_time=\ncache_time\n]\n [--reflect_uploads=\nreflect_uploads\n]\n [--share_usage_data=\nshare_usage_data\n]\n [--peer_search_timeout=\npeer_search_timeout\n]\n [--sd_download_timeout=\nsd_download_timeout\n]\n [--auto_renew_claim_height_delta=\nauto_renew_claim_height_delta\n]\n\n\nOptions:\n --download_directory=\ndownload_directory\n : (str) path of download directory\n --data_rate=\ndata_rate\n : (float) 0.0001\n --download_timeout=\ndownload_timeout\n : (int) 180\n --peer_port=\npeer_port\n : (int) 3333\n --max_key_fee=\nmax_key_fee\n : (dict) maximum key fee for downloads,\n in the format:\n {\n 'currency': \ncurrency_symbol\n,\n 'amount': \namount\n\n }.\n In the CLI, it must be an escaped JSON string\n Supported currency symbols: LBC, USD, BTC\n --disable_max_key_fee=\ndisable_max_key_fee\n : (bool) False\n --use_upnp=\nuse_upnp\n : (bool) True\n --run_reflector_server=\nrun_reflector_server\n : (bool) False\n --cache_time=\ncache_time\n : (int) 150\n --reflect_uploads=\nreflect_uploads\n : (bool) True\n --share_usage_data=\nshare_usage_data\n : (bool) True\n --peer_search_timeout=\npeer_search_timeout\n : (int) 3\n --sd_download_timeout=\nsd_download_timeout\n : (int) 3\n --auto_renew_claim_height_delta=\nauto_renew_claim_height_delta\n : (int) 0\n claims set to expire within this many blocks will be\n automatically renewed after startup (if set to 0, renews\n will not be made automatically)\n\nReturns:\n (dict) Updated dictionary of daemon settings\n\n\n\n\nstatus\n\n\nGet daemon status\n\nUsage:\n status [--session_status] [--dht_status]\n\n\nOptions:\n --session_status : (bool) include session status in results\n --dht_status : (bool) include dht network and peer status\n\nReturns:\n (dict) lbrynet-daemon status\n {\n 'lbry_id': lbry peer id, base58,\n 'installation_id': 
installation id, base58,\n 'is_running': bool,\n 'is_first_run': bool,\n 'startup_status': {\n 'code': status code,\n 'message': status message\n },\n 'connection_status': {\n 'code': connection status code,\n 'message': connection status message\n },\n 'blockchain_status': {\n 'blocks': local blockchain height,\n 'blocks_behind': remote_height - local_height,\n 'best_blockhash': block hash of most recent block,\n },\n 'wallet_is_encrypted': bool,\n\n If given the session status option:\n 'session_status': {\n 'managed_blobs': count of blobs in the blob manager,\n 'managed_streams': count of streams in the file manager\n 'announce_queue_size': number of blobs currently queued to be announced\n 'should_announce_blobs': number of blobs that should be announced\n }\n\n If given the dht status option:\n 'dht_status': {\n 'kbps_received': current kbps receiving,\n 'kbps_sent': current kdps being sent,\n 'total_bytes_sent': total bytes sent,\n 'total_bytes_received': total bytes received,\n 'queries_received': number of queries received per second,\n 'queries_sent': number of queries sent per second,\n 'recent_contacts': count of recently contacted peers,\n 'unique_contacts': count of unique peers\n },\n }\n\n\n\n\nstream_availability\n\n\nGet stream availability for lbry uri\n\nUsage:\n stream_availability (\nuri\n | --uri=\nuri\n)\n [\nsearch_timeout\n | --search_timeout=\nsearch_timeout\n]\n [\nblob_timeout\n | --blob_timeout=\nblob_timeout\n]\n\n\nOptions:\n --uri=\nuri\n : (str) check availability for this uri\n --search_timeout=\nsearch_timeout\n : (int) how long to search for peers for the blob\n in the dht\n --search_timeout=\nblob_timeout\n : (int) how long to try downloading from a peer\n\nReturns:\n (dict) {\n 'is_available': \nbool\n,\n 'did_decode': \nbool\n,\n 'did_resolve': \nbool\n,\n 'is_stream': \nbool\n,\n 'num_blobs_in_stream': \nint\n,\n 'sd_hash': \nstr\n,\n 'sd_blob_availability': \ndict\n see `blob_availability`,\n 'head_blob_hash': \nstr\n,\n 'head_blob_availability': \ndict\n see `blob_availability`,\n 'use_upnp': \nbool\n,\n 'upnp_redirect_is_set': \nbool\n,\n 'error': \nNone\n | \nstr\n error message\n }\n\n\n\n\nstream_cost_estimate\n\n\nGet estimated cost for a lbry stream\n\nUsage:\n stream_cost_estimate (\nuri\n | --uri=\nuri\n) [\nsize\n | --size=\nsize\n]\n\n\nOptions:\n --uri=\nuri\n : (str) uri to use\n --size=\nsize\n : (float) stream size in bytes. 
if provided an sd blob won't be\n downloaded.\n\nReturns:\n (float) Estimated cost in lbry credits, returns None if uri is not\n resolvable\n\n\n\n\ntransaction_list\n\n\nList transactions belonging to wallet\n\nUsage:\n transaction_list\n\n\nOptions:\n None\n\nReturns:\n (list) List of transactions\n\n {\n \nclaim_info\n: (list) claim info if in txn [{\n \naddress\n: (str) address of claim,\n \nbalance_delta\n: (float) bid amount,\n \namount\n: (float) claim amount,\n \nclaim_id\n: (str) claim id,\n \nclaim_name\n: (str) claim name,\n \nnout\n: (int) nout\n }],\n \nabandon_info\n: (list) abandon info if in txn [{\n \naddress\n: (str) address of abandoned claim,\n \nbalance_delta\n: (float) returned amount,\n \namount\n: (float) claim amount,\n \nclaim_id\n: (str) claim id,\n \nclaim_name\n: (str) claim name,\n \nnout\n: (int) nout\n }],\n \nconfirmations\n: (int) number of confirmations for the txn,\n \ndate\n: (str) date and time of txn,\n \nfee\n: (float) txn fee,\n \nsupport_info\n: (list) support info if in txn [{\n \naddress\n: (str) address of support,\n \nbalance_delta\n: (float) support amount,\n \namount\n: (float) support amount,\n \nclaim_id\n: (str) claim id,\n \nclaim_name\n: (str) claim name,\n \nis_tip\n: (bool),\n \nnout\n: (int) nout\n }],\n \ntimestamp\n: (int) timestamp,\n \ntxid\n: (str) txn id,\n \nupdate_info\n: (list) update info if in txn [{\n \naddress\n: (str) address of claim,\n \nbalance_delta\n: (float) credited/debited\n \namount\n: (float) absolute amount,\n \nclaim_id\n: (str) claim id,\n \nclaim_name\n: (str) claim name,\n \nnout\n: (int) nout\n }],\n \nvalue\n: (float) value of txn\n }\n\n\n\n\ntransaction_show\n\n\nGet a decoded transaction from a txid\n\nUsage:\n transaction_show (\ntxid\n | --txid=\ntxid\n)\n\n\nOptions:\n --txid=\ntxid\n : (str) txid of the transaction\n\nReturns:\n (dict) JSON formatted transaction\n\n\n\n\nutxo_list\n\n\nList unspent transaction outputs\n\nUsage:\n utxo_list\n\n\nOptions:\n None\n\nReturns:\n (list) List of unspent transaction outputs (UTXOs)\n [\n {\n \naddress\n: (str) the output address\n \namount\n: (float) unspent amount\n \nheight\n: (int) block height\n \nis_claim\n: (bool) is the tx a claim\n \nis_coinbase\n: (bool) is the tx a coinbase tx\n \nis_support\n: (bool) is the tx a support\n \nis_update\n: (bool) is the tx an update\n \nnout\n: (int) nout of the output\n \ntxid\n: (str) txid of the output\n },\n ...\n ]\n\n\n\n\nversion\n\n\nGet lbry version information\n\nUsage:\n version\n\n\nOptions:\n None\n\nReturns:\n (dict) Dictionary of lbry version information\n {\n 'build': (str) build type (e.g. 
\ndev\n, \nrc\n, \nrelease\n),\n 'ip': (str) remote ip, if available,\n 'lbrynet_version': (str) lbrynet_version,\n 'lbryum_version': (str) lbryum_version,\n 'lbryschema_version': (str) lbryschema_version,\n 'os_release': (str) os release string\n 'os_system': (str) os name\n 'platform': (str) platform string\n 'processor': (str) processor type,\n 'python_version': (str) python version,\n }\n\n\n\n\nwallet_balance\n\n\nReturn the balance of the wallet\n\nUsage:\n wallet_balance [\naddress\n | --address=\naddress\n] [--include_unconfirmed]\n\n\nOptions:\n --address=\naddress\n : (str) If provided only the balance for this\n address will be given\n --include_unconfirmed : (bool) Include unconfirmed\n\nReturns:\n (float) amount of lbry credits in wallet\n\n\n\n\nwallet_decrypt\n\n\nDecrypt an encrypted wallet, this will remove the wallet password\n\nUsage:\n wallet_decrypt\n\n\nOptions:\n None\n\nReturns:\n (bool) true if wallet is decrypted, otherwise false\n\n\n\n\nwallet_encrypt\n\n\nEncrypt a wallet with a password, if the wallet is already encrypted this will update\nthe password\n\nUsage:\n wallet_encrypt (\nnew_password\n | --new_password=\nnew_password\n)\n\n\nOptions:\n --new_password=\nnew_password\n : (str) password string to be used for encrypting wallet\n\nReturns:\n (bool) true if wallet is decrypted, otherwise false\n\n\n\n\nwallet_is_address_mine\n\n\nChecks if an address is associated with the current wallet.\n\nUsage:\n wallet_is_address_mine (\naddress\n | --address=\naddress\n)\n\n\nOptions:\n --address=\naddress\n : (str) address to check\n\nReturns:\n (bool) true, if address is associated with current wallet\n\n\n\n\nwallet_list\n\n\nList wallet addresses\n\nUsage:\n wallet_list\n\n\nOptions:\n None\n\nReturns:\n List of wallet addresses\n\n\n\n\nwallet_new_address\n\n\nGenerate a new wallet address\n\nUsage:\n wallet_new_address\n\n\nOptions:\n None\n\nReturns:\n (str) New wallet address in base58\n\n\n\n\nwallet_prefill_addresses\n\n\nCreate new addresses, each containing `amount` credits\n\nUsage:\n wallet_prefill_addresses [--no_broadcast]\n (\nnum_addresses\n | --num_addresses=\nnum_addresses\n)\n (\namount\n | --amount=\namount\n)\n\n\nOptions:\n --no_broadcast : (bool) whether to broadcast or not\n --num_addresses=\nnum_addresses\n : (int) num of addresses to create\n --amount=\namount\n : (float) initial amount in each address\n\nReturns:\n (dict) the resulting transaction\n\n\n\n\nwallet_public_key\n\n\nGet public key from wallet address\n\nUsage:\n wallet_public_key (\naddress\n | --address=\naddress\n)\n\n\nOptions:\n --address=\naddress\n : (str) address for which to get the public key\n\nReturns:\n (list) list of public keys associated with address.\n Could contain more than one public key if multisig.\n\n\n\n\nwallet_send\n\n\nSend credits. If given an address, send credits to it. If given a claim id, send a tip\nto the owner of a claim specified by uri. 
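As documented for `wallet_send`, the command behaves differently depending on whether it is given an address or a claim id: a plain payment returns a boolean, while a tip returns a dictionary describing the resulting support claim. A small sketch of both CLI forms, assuming `lbrynet-cli` is on PATH and the daemon is running; the address and claim id are placeholders.

```python
# Sketch of the two wallet_send forms described in this document. Assumes
# `lbrynet-cli` is on PATH; the address and claim id values are placeholders.
import subprocess


def cli(*args):
    return subprocess.check_output(("lbrynet-cli",) + args).decode("utf-8")


# Plain payment to an address: returns a boolean (true if payment was scheduled).
print(cli("wallet_send", "--amount=1.0", "--address=bExamplePlaceholderAddress"))

# Tip to a claim: returns a dict with txid/nout/fee of the resulting support claim.
print(cli("wallet_send", "--amount=1.0",
          "--claim_id=0123456789abcdef0123456789abcdef01234567"))
```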
A tip is a claim support where the recipient\nof the support is the claim address for the claim being supported.\n\nUsage:\n wallet_send (\namount\n | --amount=\namount\n)\n ((\naddress\n | --address=\naddress\n) | (\nclaim_id\n | --claim_id=\nclaim_id\n))\n\n\nOptions:\n --amount=\namount\n : (float) amount of credit to send\n --address=\naddress\n : (str) address to send credits to\n --claim_id=\nclaim_id\n : (float) claim_id of the claim to send to tip to\n\nReturns:\n If sending to an address:\n (bool) true if payment successfully scheduled\n\n If sending a claim tip:\n (dict) Dictionary containing the result of the support\n {\n txid : (str) txid of resulting support claim\n nout : (int) nout of the resulting support claim\n fee : (float) fee paid for the transaction\n }\n\n\n\n\nwallet_unlock\n\n\nUnlock an encrypted wallet\n\nUsage:\n wallet_unlock (\npassword\n | --password=\npassword\n)\n\n\nOptions:\n --password=\npassword\n : (str) password for unlocking wallet\n\nReturns:\n (bool) true if wallet is unlocked, otherwise false\n\n\n\n\nwallet_unused_address\n\n\nReturn an address containing no balance, will create\na new address if there is none.\n\nUsage:\n wallet_unused_address\n\n\nOptions:\n None\n\nReturns:\n (str) Unused wallet address in base58", + "title": "CLI" + }, + { + "location": "/cli/#lbry-command-line-documentation", + "text": "", + "title": "LBRY Command Line Documentation" + }, + { + "location": "/cli/#blob_announce", + "text": "Announce blobs to the DHT\n\nUsage:\n blob_announce [--announce_all] [ blob_hash | --blob_hash= blob_hash ]\n [ stream_hash | --stream_hash= stream_hash ]\n [ sd_hash | --sd_hash= sd_hash ]\n\n\nOptions:\n --announce_all= announce_all : (bool) announce all the blobs possessed by user\n --blob_hash= blob_hash : (str) announce a blob, specified by blob_hash\n --stream_hash= stream_hash : (str) announce all blobs associated with\n stream_hash\n --sd_hash= sd_hash : (str) announce all blobs associated with\n sd_hash and the sd_hash itself\n\nReturns:\n (bool) true if successful", + "title": "blob_announce" + }, + { + "location": "/cli/#blob_availability", + "text": "Get blob availability\n\nUsage:\n blob_availability ( blob_hash ) [ search_timeout | --search_timeout= search_timeout ]\n [ blob_timeout | --blob_timeout= blob_timeout ]\n\n\nOptions:\n --blob_hash= blob_hash : (str) check availability for this blob hash\n --search_timeout= search_timeout : (int) how long to search for peers for the blob\n in the dht\n --blob_timeout= blob_timeout : (int) how long to try downloading from a peer\n\nReturns:\n (dict) {\n is_available : bool, true if blob is available from a peer from peer list \n reachable_peers : [ ip : port ],\n unreachable_peers : [ ip : port ]\n }", + "title": "blob_availability" + }, + { + "location": "/cli/#blob_delete", + "text": "Delete a blob\n\nUsage:\n blob_delete ( blob_hash | --blob_hash= blob_hash)\n\n\nOptions:\n --blob_hash= blob_hash : (str) blob hash of the blob to delete\n\nReturns:\n (str) Success/fail message", + "title": "blob_delete" + }, + { + "location": "/cli/#blob_get", + "text": "Download and return a blob\n\nUsage:\n blob_get ( blob_hash | --blob_hash= blob_hash ) [--timeout= timeout ]\n [--encoding= encoding ] [--payment_rate_manager= payment_rate_manager ]\n\n\nOptions:\n --blob_hash= blob_hash : (str) blob hash of the blob to get\n --timeout= timeout : (int) timeout in number of seconds\n --encoding= encoding : (str) by default no attempt at decoding\n is made, can be set to one of the\n following 
decoders:\n 'json'\n --payment_rate_manager= payment_rate_manager : (str) if not given the default payment rate\n manager will be used.\n supported alternative rate managers:\n 'only-free'\n\nReturns:\n (str) Success/Fail message or (dict) decoded data", + "title": "blob_get" + }, + { + "location": "/cli/#blob_list", + "text": "Returns blob hashes. If not given filters, returns all blobs known by the blob manager\n\nUsage:\n blob_list [--needed] [--finished] [ uri | --uri= uri ]\n [ stream_hash | --stream_hash= stream_hash ]\n [ sd_hash | --sd_hash= sd_hash ]\n [ page_size | --page_size= page_size ]\n [ page | --page= page ]\n\n\nOptions:\n --needed : (bool) only return needed blobs\n --finished : (bool) only return finished blobs\n --uri= uri : (str) filter blobs by stream in a uri\n --stream_hash= stream_hash : (str) filter blobs by stream hash\n --sd_hash= sd_hash : (str) filter blobs by sd hash\n --page_size= page_size : (int) results page size\n --page= page : (int) page of results to return\n\nReturns:\n (list) List of blob hashes", + "title": "blob_list" + }, + { + "location": "/cli/#blob_reflect_all", + "text": "Reflects all saved blobs\n\nUsage:\n blob_reflect_all\n\n\nOptions:\n None\n\nReturns:\n (bool) true if successful", + "title": "blob_reflect_all" + }, + { + "location": "/cli/#block_show", + "text": "Get contents of a block\n\nUsage:\n block_show ( blockhash | --blockhash= blockhash ) | ( height | --height= height )\n\n\nOptions:\n --blockhash= blockhash : (str) hash of the block to look up\n --height= height : (int) height of the block to look up\n\nReturns:\n (dict) Requested block", + "title": "block_show" + }, + { + "location": "/cli/#channel_export", + "text": "Export serialized channel signing information for a given certificate claim id\n\nUsage:\n channel_export ( claim_id | --claim_id= claim_id )\n\n\nOptions:\n --claim_id= claim_id : (str) Claim ID to export information about\n\nReturns:\n (str) Serialized certificate information", + "title": "channel_export" + }, + { + "location": "/cli/#channel_import", + "text": "Import serialized channel signing information (to allow signing new claims to the channel)\n\nUsage:\n channel_import ( serialized_certificate_info |\n --serialized_certificate_info= serialized_certificate_info )\n\n\nOptions:\n --serialized_certificate_info= serialized_certificate_info : (str) certificate info\n\nReturns:\n (dict) Result dictionary", + "title": "channel_import" + }, + { + "location": "/cli/#channel_list", + "text": "Get certificate claim infos for channels that can be published to\n\nUsage:\n channel_list\n\n\nOptions:\n None\n\nReturns:\n (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim\n is in the wallet.", + "title": "channel_list" + }, + { + "location": "/cli/#channel_new", + "text": "Generate a publisher key and create a new '@' prefixed certificate claim\n\nUsage:\n channel_new ( channel_name | --channel_name= channel_name )\n ( amount | --amount= amount )\n\n\nOptions:\n --channel_name= channel_name : (str) name of the channel prefixed with '@'\n --amount= amount : (float) bid amount on the channel\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }", + "title": "channel_new" + }, + { + "location": "/cli/#claim_abandon", + "text": "Abandon a name and 
reclaim credits from the claim\n\nUsage:\n claim_abandon [ claim_id | --claim_id= claim_id ]\n [ txid | --txid= txid ] [ nout | --nout= nout ]\n\n\nOptions:\n --claim_id= claim_id : (str) claim_id of the claim to abandon\n --txid= txid : (str) txid of the claim to abandon\n --nout= nout : (int) nout of the claim to abandon\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n txid : (str) txid of resulting transaction\n fee : (float) fee paid for the transaction\n }", + "title": "claim_abandon" + }, + { + "location": "/cli/#claim_list", + "text": "List current claims and information about them for a given name\n\nUsage:\n claim_list ( name | --name= name )\n\n\nOptions:\n --name= name : (str) name of the claim to list info about\n\nReturns:\n (dict) State of claims assigned for the name\n {\n 'claims': (list) list of claims for the name\n [\n {\n 'amount': (float) amount assigned to the claim\n 'effective_amount': (float) total amount assigned to the claim,\n including supports\n 'claim_id': (str) claim ID of the claim\n 'height': (int) height of block containing the claim\n 'txid': (str) txid of the claim\n 'nout': (int) nout of the claim\n 'permanent_url': (str) permanent url of the claim,\n 'supports': (list) a list of supports attached to the claim\n 'value': (str) the value of the claim\n },\n ]\n 'supports_without_claims': (list) supports without any claims attached to them\n 'last_takeover_height': (int) the height of last takeover for the name\n }", + "title": "claim_list" + }, + { + "location": "/cli/#claim_list_by_channel", + "text": "Get paginated claims in a channel specified by a channel uri\n\nUsage:\n claim_list_by_channel ( uri | --uri= uri ) [ uris ...] [--page= page ]\n [--page_size= page_size ]\n\n\nOptions:\n --uri= uri : (str) uri of the channel\n --uris= uris : (list) uris of the channel\n --page= page : (int) which page of results to return where page 1 is the first\n page, defaults to no pages\n --page_size= page_size : (int) number of results in a page, default of 10\n\nReturns:\n {\n resolved channel uri: {\n If there was an error:\n 'error': (str) error message\n\n 'claims_in_channel': the total number of results for the channel,\n\n If a page of results was requested:\n 'returned_page': page number returned,\n 'claims_in_channel': [\n {\n 'absolute_channel_position': (int) claim index number in sorted list of\n claims which assert to be part of the\n channel\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}],\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n ],\n }\n }", + "title": "claim_list_by_channel" + }, + { + "location": "/cli/#claim_list_mine", + "text": "List my name claims\n\nUsage:\n claim_list_mine\n\n\nOptions:\n None\n\nReturns:\n (list) List of name claims owned by user\n [\n {\n 'address': (str) address that owns the claim\n 'amount': (float) amount assigned to the claim\n 'blocks_to_expiration': (int) number of blocks until it expires\n 
'category': (str) claim , update , or support \n 'claim_id': (str) claim ID of the claim\n 'confirmations': (int) number of blocks of confirmations for the claim\n 'expiration_height': (int) the block height which the claim will expire\n 'expired': (bool) true if expired, false otherwise\n 'height': (int) height of the block containing the claim\n 'is_spent': (bool) true if claim is abandoned, false otherwise\n 'name': (str) name of the claim\n 'permanent_url': (str) permanent url of the claim,\n 'txid': (str) txid of the cliam\n 'nout': (int) nout of the claim\n 'value': (str) value of the claim\n },\n ]", + "title": "claim_list_mine" + }, + { + "location": "/cli/#claim_new_support", + "text": "Support a name claim\n\nUsage:\n claim_new_support ( name | --name= name ) ( claim_id | --claim_id= claim_id )\n ( amount | --amount= amount )\n\n\nOptions:\n --name= name : (str) name of the claim to support\n --claim_id= claim_id : (str) claim_id of the claim to support\n --amount= amount : (float) amount of support\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n txid : (str) txid of resulting support claim\n nout : (int) nout of the resulting support claim\n fee : (float) fee paid for the transaction\n }", + "title": "claim_new_support" + }, + { + "location": "/cli/#claim_renew", + "text": "Renew claim(s) or support(s)\n\nUsage:\n claim_renew ( outpoint | --outpoint= outpoint ) | ( height | --height= height )\n\n\nOptions:\n --outpoint= outpoint : (str) outpoint of the claim to renew\n --height= height : (str) update claims expiring before or at this block height\n\nReturns:\n (dict) Dictionary where key is the the original claim's outpoint and\n value is the result of the renewal\n {\n outpoint:{\n\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n },\n }", + "title": "claim_renew" + }, + { + "location": "/cli/#claim_send_to_address", + "text": "Send a name claim to an address\n\nUsage:\n claim_send_to_address ( claim_id | --claim_id= claim_id )\n ( address | --address= address )\n [ amount | --amount= amount ]\n\n\nOptions:\n --claim_id= claim_id : (str) claim_id to send\n --address= address : (str) address to send the claim to\n --amount amount : (int) Amount of credits to claim name for, defaults to the current amount\n on the claim\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }", + "title": "claim_send_to_address" + }, + { + "location": "/cli/#claim_show", + "text": "Resolve claim info from txid/nout or with claim ID\n\nUsage:\n claim_show [ txid | --txid= txid ] [ nout | --nout= nout ]\n [ claim_id | --claim_id= claim_id ]\n\n\nOptions:\n --txid= txid : (str) look for claim with this txid, nout must\n also be specified\n --nout= nout : (int) look for claim with this nout, txid must\n also be specified\n --claim_id= claim_id : (str) look for claim with this claim id\n\nReturns:\n (dict) Dictionary containing claim info as below,\n\n {\n 'txid': (str) txid of claim\n 'nout': (int) nout of claim\n 'amount': (float) amount of claim\n 'value': (str) value of claim\n 'height' : (int) height of claim takeover\n 'claim_id': (str) claim ID of 
claim\n 'supports': (list) list of supports associated with claim\n }\n\n if claim cannot be resolved, dictionary as below will be returned\n\n {\n 'error': (str) reason for error\n }", + "title": "claim_show" + }, + { + "location": "/cli/#cli_test_command", + "text": "This command is only for testing the CLI argument parsing\nUsage:\n cli_test_command [--a_arg] [--b_arg] ( pos_arg | --pos_arg= pos_arg )\n [ pos_args ...] [--pos_arg2= pos_arg2 ]\n [--pos_arg3= pos_arg3 ]\n\n\nOptions:\n --a_arg : a arg\n --b_arg : b arg\n --pos_arg= pos_arg : pos arg\n --pos_args= pos_args : pos args\n --pos_arg2= pos_arg2 : pos arg 2\n --pos_arg3= pos_arg3 : pos arg 3\n\nReturns:\n pos args", + "title": "cli_test_command" + }, + { + "location": "/cli/#commands", + "text": "Return a list of available commands\n\nUsage:\n commands\n\n\nOptions:\n None\n\nReturns:\n (list) list of available commands", + "title": "commands" + }, + { + "location": "/cli/#daemon_stop", + "text": "Stop lbrynet-daemon\n\nUsage:\n daemon_stop\n\n\nOptions:\n None\n\nReturns:\n (string) Shutdown message", + "title": "daemon_stop" + }, + { + "location": "/cli/#file_delete", + "text": "Delete a LBRY file\n\nUsage:\n file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash= sd_hash ] [--file_name= file_name ]\n [--stream_hash= stream_hash ] [--rowid= rowid ] [--claim_id= claim_id ] [--txid= txid ]\n [--nout= nout ] [--claim_name= claim_name ] [--channel_claim_id= channel_claim_id ]\n [--channel_name= channel_name ]\n\n\nOptions:\n --delete_from_download_dir : (bool) delete file from download directory,\n instead of just deleting blobs\n --delete_all : (bool) if there are multiple matching files,\n allow the deletion of multiple files.\n Otherwise do not delete anything.\n --sd_hash= sd_hash : (str) delete by file sd hash\n --file_name file_name : (str) delete by file name in downloads folder\n --stream_hash= stream_hash : (str) delete by file stream hash\n --rowid= rowid : (int) delete by file row id\n --claim_id= claim_id : (str) delete by file claim id\n --txid= txid : (str) delete by file claim txid\n --nout= nout : (int) delete by file claim nout\n --claim_name= claim_name : (str) delete by file claim name\n --channel_claim_id= channel_claim_id : (str) delete by file channel claim id\n --channel_name= channel_name : (str) delete by file channel claim name\n\nReturns:\n (bool) true if deletion was successful", + "title": "file_delete" + }, + { + "location": "/cli/#file_list", + "text": "List files limited by optional filters\n\nUsage:\n file_list [--sd_hash= sd_hash ] [--file_name= file_name ] [--stream_hash= stream_hash ]\n [--rowid= rowid ] [--claim_id= claim_id ] [--outpoint= outpoint ] [--txid= txid ] [--nout= nout ]\n [--channel_claim_id= channel_claim_id ] [--channel_name= channel_name ]\n [--claim_name= claim_name ] [--full_status]\n\n\nOptions:\n --sd_hash= sd_hash : (str) get file with matching sd hash\n --file_name= file_name : (str) get file with matching file name in the\n downloads folder\n --stream_hash= stream_hash : (str) get file with matching stream hash\n --rowid= rowid : (int) get file with matching row id\n --claim_id= claim_id : (str) get file with matching claim id\n --outpoint= outpoint : (str) get file with matching claim outpoint\n --txid= txid : (str) get file with matching claim txid\n --nout= nout : (int) get file with matching claim nout\n --channel_claim_id= channel_claim_id : (str) get file with matching channel claim id\n --channel_name= channel_name : (str) get file with matching channel 
name\n --claim_name= claim_name : (str) get file with matching claim name\n --full_status : (bool) full status, populate the\n 'message' and 'size' fields\n\nReturns:\n (list) List of files\n\n [\n {\n 'completed': (bool) true if download is completed,\n 'file_name': (str) name of file,\n 'download_directory': (str) download directory,\n 'points_paid': (float) credit paid to download file,\n 'stopped': (bool) true if download is stopped,\n 'stream_hash': (str) stream hash of file,\n 'stream_name': (str) stream name ,\n 'suggested_file_name': (str) suggested file name,\n 'sd_hash': (str) sd hash of file,\n 'download_path': (str) download path of file,\n 'mime_type': (str) mime type of file,\n 'key': (str) key attached to file,\n 'total_bytes': (int) file size in bytes, None if full_status is false,\n 'written_bytes': (int) written size in bytes,\n 'blobs_completed': (int) num_completed, None if full_status is false,\n 'blobs_in_stream': (int) None if full_status is false,\n 'status': (str) downloader status, None if full_status is false,\n 'claim_id': (str) None if full_status is false or if claim is not found,\n 'outpoint': (str) None if full_status is false or if claim is not found,\n 'txid': (str) None if full_status is false or if claim is not found,\n 'nout': (int) None if full_status is false or if claim is not found,\n 'metadata': (dict) None if full_status is false or if claim is not found,\n 'channel_claim_id': (str) None if full_status is false or if claim is not found or signed,\n 'channel_name': (str) None if full_status is false or if claim is not found or signed,\n 'claim_name': (str) None if full_status is false or if claim is not found\n },\n ]", + "title": "file_list" + }, + { + "location": "/cli/#file_reflect", + "text": "Reflect all the blobs in a file matching the filter criteria\n\nUsage:\n file_reflect [--sd_hash= sd_hash ] [--file_name= file_name ]\n [--stream_hash= stream_hash ] [--rowid= rowid ]\n [--reflector= reflector ]\n\n\nOptions:\n --sd_hash= sd_hash : (str) get file with matching sd hash\n --file_name= file_name : (str) get file with matching file name in the\n downloads folder\n --stream_hash= stream_hash : (str) get file with matching stream hash\n --rowid= rowid : (int) get file with matching row id\n --reflector= reflector : (str) reflector server, ip address or url\n by default choose a server from the config\n\nReturns:\n (list) list of blobs reflected", + "title": "file_reflect" + }, + { + "location": "/cli/#file_set_status", + "text": "Start or stop downloading a file\n\nUsage:\n file_set_status ( status | --status= status ) [--sd_hash= sd_hash ]\n [--file_name= file_name ] [--stream_hash= stream_hash ] [--rowid= rowid ]\n\n\nOptions:\n --status= status : (str) one of start or stop \n --sd_hash= sd_hash : (str) set status of file with matching sd hash\n --file_name= file_name : (str) set status of file with matching file name in the\n downloads folder\n --stream_hash= stream_hash : (str) set status of file with matching stream hash\n --rowid= rowid : (int) set status of file with matching row id\n\nReturns:\n (str) Confirmation message", + "title": "file_set_status" + }, + { + "location": "/cli/#get", + "text": "Download stream from a LBRY name.\n\nUsage:\n get uri [ file_name | --file_name= file_name ] [ timeout | --timeout= timeout ]\n\n\n\nOptions:\n --uri= uri : (str) uri of the content to download\n --file_name= file_name : (str) specified name for the downloaded file\n --timeout= timeout : (int) download timeout in number of 
seconds\n\nReturns:\n (dict) Dictionary containing information about the stream\n {\n 'completed': (bool) true if download is completed,\n 'file_name': (str) name of file,\n 'download_directory': (str) download directory,\n 'points_paid': (float) credit paid to download file,\n 'stopped': (bool) true if download is stopped,\n 'stream_hash': (str) stream hash of file,\n 'stream_name': (str) stream name ,\n 'suggested_file_name': (str) suggested file name,\n 'sd_hash': (str) sd hash of file,\n 'download_path': (str) download path of file,\n 'mime_type': (str) mime type of file,\n 'key': (str) key attached to file,\n 'total_bytes': (int) file size in bytes, None if full_status is false,\n 'written_bytes': (int) written size in bytes,\n 'blobs_completed': (int) num_completed, None if full_status is false,\n 'blobs_in_stream': (int) None if full_status is false,\n 'status': (str) downloader status, None if full_status is false,\n 'claim_id': (str) claim id,\n 'outpoint': (str) claim outpoint string,\n 'txid': (str) claim txid,\n 'nout': (int) claim nout,\n 'metadata': (dict) claim metadata,\n 'channel_claim_id': (str) None if claim is not signed\n 'channel_name': (str) None if claim is not signed\n 'claim_name': (str) claim name\n }", + "title": "get" + }, + { + "location": "/cli/#help", + "text": "Return a useful message for an API command\n\nUsage:\n help [ command | --command= command ]\n\n\nOptions:\n --command= command : (str) command to retrieve documentation for\n\nReturns:\n (str) Help message", + "title": "help" + }, + { + "location": "/cli/#peer_list", + "text": "Get peers for blob hash\n\nUsage:\n peer_list ( blob_hash | --blob_hash= blob_hash ) [ timeout | --timeout= timeout ]\n\n\nOptions:\n --blob_hash= blob_hash : (str) find available peers for this blob hash\n --timeout= timeout : (int) peer search timeout in seconds\n\nReturns:\n (list) List of contacts", + "title": "peer_list" + }, + { + "location": "/cli/#publish", + "text": "Make a new name claim and publish associated data to lbrynet,\nupdate over existing claim if user already has a claim for name.\n\nFields required in the final Metadata are:\n 'title'\n 'description'\n 'author'\n 'language'\n 'license'\n 'nsfw'\n\nMetadata can be set by either using the metadata argument or by setting individual arguments\nfee, title, description, author, language, license, license_url, thumbnail, preview, nsfw,\nor sources. Individual arguments will overwrite the fields specified in metadata argument.\n\nUsage:\n publish ( name | --name= name ) ( bid | --bid= bid ) [--metadata= metadata ]\n [--file_path= file_path ] [--fee= fee ] [--title= title ]\n [--description= description ] [--author= author ] [--language= language ]\n [--license= license ] [--license_url= license_url ] [--thumbnail= thumbnail ]\n [--preview= preview ] [--nsfw= nsfw ] [--sources= sources ]\n [--channel_name= channel_name ] [--channel_id= channel_id ]\n [--claim_address= claim_address ] [--change_address= change_address ]\n\n\nOptions:\n --name= name : (str) name of the content\n --bid= bid : (float) amount to back the claim\n --metadata= metadata : (dict) ClaimDict to associate with the claim.\n --file_path= file_path : (str) path to file to be associated with name. If provided,\n a lbry stream of this file will be used in 'sources'.\n If no path is given but a sources dict is provided,\n it will be used. 
If neither are provided, an\n error is raised.\n --fee= fee : (dict) Dictionary representing key fee to download content:\n {\n 'currency': currency_symbol,\n 'amount': float,\n 'address': str, optional\n }\n supported currencies: LBC, USD, BTC\n If an address is not provided a new one will be\n automatically generated. Default fee is zero.\n --title= title : (str) title of the publication\n --description= description : (str) description of the publication\n --author= author : (str) author of the publication\n --language= language : (str) language of the publication\n --license= license : (str) publication license\n --license_url= license_url : (str) publication license url\n --thumbnail= thumbnail : (str) thumbnail url\n --preview= preview : (str) preview url\n --nsfw= nsfw : (bool) title of the publication\n --sources= sources : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file\n --channel_name= channel_name : (str) name of the publisher channel name in the wallet\n --channel_id= channel_id : (str) claim id of the publisher channel, does not check\n for channel claim being in the wallet. This allows\n publishing to a channel where only the certificate\n private key is in the wallet.\n --claim_address= claim_address : (str) address where the claim is sent to, if not specified\n new address wil automatically be created\n\nReturns:\n (dict) Dictionary containing result of the claim\n {\n 'tx' : (str) hex encoded transaction\n 'txid' : (str) txid of resulting claim\n 'nout' : (int) nout of the resulting claim\n 'fee' : (float) fee paid for the claim transaction\n 'claim_id' : (str) claim ID of the resulting claim\n }", + "title": "publish" + }, + { + "location": "/cli/#report_bug", + "text": "Report a bug to slack\n\nUsage:\n report_bug ( message | --message= message )\n\n\nOptions:\n --message= message : (str) Description of the bug\n\nReturns:\n (bool) true if successful", + "title": "report_bug" + }, + { + "location": "/cli/#resolve", + "text": "Resolve given LBRY URIs\n\nUsage:\n resolve [--force] ( uri | --uri= uri ) [ uris ...]\n\n\nOptions:\n --force : (bool) force refresh and ignore cache\n --uri= uri : (str) uri to resolve\n --uris= uris : (list) uris to resolve\n\nReturns:\n Dictionary of results, keyed by uri\n ' uri ': {\n If a resolution error occurs:\n 'error': Error message\n\n If the uri resolves to a channel or a claim in a channel:\n 'certificate': {\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'permanent_url': (str) permanent url of the certificate claim,\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}],\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n\n If the uri resolves to a channel:\n 'claims_in_channel': (int) number of claims in the channel,\n\n If the uri resolves to a claim:\n 'claim': {\n 'address': (str) claim address,\n 'amount': (float) claim amount,\n 'effective_amount': (float) claim amount including supports,\n 'claim_id': (str) claim id,\n 'claim_sequence': (int) claim sequence number,\n 
'decoded_claim': (bool) whether or not the claim value was decoded,\n 'height': (int) claim height,\n 'depth': (int) claim depth,\n 'has_signature': (bool) included if decoded_claim\n 'name': (str) claim name,\n 'permanent_url': (str) permanent url of the claim,\n 'channel_name': (str) channel name if claim is in a channel\n 'supports: (list) list of supports [{'txid': (str) txid,\n 'nout': (int) nout,\n 'amount': (float) amount}]\n 'txid': (str) claim txid,\n 'nout': (str) claim nout,\n 'signature_is_valid': (bool), included if has_signature,\n 'value': ClaimDict if decoded, otherwise hex string\n }\n }", + "title": "resolve" + }, + { + "location": "/cli/#resolve_name", + "text": "Resolve stream info from a LBRY name\n\nUsage:\n resolve_name ( name | --name= name ) [--force]\n\n\nOptions:\n --name= name : (str) the name to resolve\n --force : (bool) force refresh and do not check cache\n\nReturns:\n (dict) Metadata dictionary from name claim, None if the name is not\n resolvable", + "title": "resolve_name" + }, + { + "location": "/cli/#routing_table_get", + "text": "Get DHT routing information\n\nUsage:\n routing_table_get\n\n\nOptions:\n None\n\nReturns:\n (dict) dictionary containing routing and contact information\n {\n buckets : {\n bucket index : [\n {\n address : (str) peer address,\n node_id : (str) peer node id,\n blobs : (list) blob hashes announced by peer\n }\n ]\n },\n contacts : (list) contact node ids,\n blob_hashes : (list) all of the blob hashes stored by peers in the list of buckets,\n node_id : (str) the local dht node id\n }", + "title": "routing_table_get" + }, + { + "location": "/cli/#settings_get", + "text": "Get daemon settings\n\nUsage:\n settings_get\n\n\nOptions:\n None\n\nReturns:\n (dict) Dictionary of daemon settings\n See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings", + "title": "settings_get" + }, + { + "location": "/cli/#settings_set", + "text": "Set daemon settings\n\nUsage:\n settings_set [--download_directory= download_directory ]\n [--data_rate= data_rate ]\n [--download_timeout= download_timeout ]\n [--peer_port= peer_port ]\n [--max_key_fee= max_key_fee ]\n [--disable_max_key_fee= disable_max_key_fee ]\n [--use_upnp= use_upnp ]\n [--run_reflector_server= run_reflector_server ]\n [--cache_time= cache_time ]\n [--reflect_uploads= reflect_uploads ]\n [--share_usage_data= share_usage_data ]\n [--peer_search_timeout= peer_search_timeout ]\n [--sd_download_timeout= sd_download_timeout ]\n [--auto_renew_claim_height_delta= auto_renew_claim_height_delta ]\n\n\nOptions:\n --download_directory= download_directory : (str) path of download directory\n --data_rate= data_rate : (float) 0.0001\n --download_timeout= download_timeout : (int) 180\n --peer_port= peer_port : (int) 3333\n --max_key_fee= max_key_fee : (dict) maximum key fee for downloads,\n in the format:\n {\n 'currency': currency_symbol ,\n 'amount': amount \n }.\n In the CLI, it must be an escaped JSON string\n Supported currency symbols: LBC, USD, BTC\n --disable_max_key_fee= disable_max_key_fee : (bool) False\n --use_upnp= use_upnp : (bool) True\n --run_reflector_server= run_reflector_server : (bool) False\n --cache_time= cache_time : (int) 150\n --reflect_uploads= reflect_uploads : (bool) True\n --share_usage_data= share_usage_data : (bool) True\n --peer_search_timeout= peer_search_timeout : (int) 3\n --sd_download_timeout= sd_download_timeout : (int) 3\n --auto_renew_claim_height_delta= auto_renew_claim_height_delta : (int) 0\n claims set to expire within this many blocks will 
be\n automatically renewed after startup (if set to 0, renews\n will not be made automatically)\n\nReturns:\n (dict) Updated dictionary of daemon settings", + "title": "settings_set" + }, + { + "location": "/cli/#status", + "text": "Get daemon status\n\nUsage:\n status [--session_status] [--dht_status]\n\n\nOptions:\n --session_status : (bool) include session status in results\n --dht_status : (bool) include dht network and peer status\n\nReturns:\n (dict) lbrynet-daemon status\n {\n 'lbry_id': lbry peer id, base58,\n 'installation_id': installation id, base58,\n 'is_running': bool,\n 'is_first_run': bool,\n 'startup_status': {\n 'code': status code,\n 'message': status message\n },\n 'connection_status': {\n 'code': connection status code,\n 'message': connection status message\n },\n 'blockchain_status': {\n 'blocks': local blockchain height,\n 'blocks_behind': remote_height - local_height,\n 'best_blockhash': block hash of most recent block,\n },\n 'wallet_is_encrypted': bool,\n\n If given the session status option:\n 'session_status': {\n 'managed_blobs': count of blobs in the blob manager,\n 'managed_streams': count of streams in the file manager\n 'announce_queue_size': number of blobs currently queued to be announced\n 'should_announce_blobs': number of blobs that should be announced\n }\n\n If given the dht status option:\n 'dht_status': {\n 'kbps_received': current kbps receiving,\n 'kbps_sent': current kdps being sent,\n 'total_bytes_sent': total bytes sent,\n 'total_bytes_received': total bytes received,\n 'queries_received': number of queries received per second,\n 'queries_sent': number of queries sent per second,\n 'recent_contacts': count of recently contacted peers,\n 'unique_contacts': count of unique peers\n },\n }", + "title": "status" + }, + { + "location": "/cli/#stream_availability", + "text": "Get stream availability for lbry uri\n\nUsage:\n stream_availability ( uri | --uri= uri )\n [ search_timeout | --search_timeout= search_timeout ]\n [ blob_timeout | --blob_timeout= blob_timeout ]\n\n\nOptions:\n --uri= uri : (str) check availability for this uri\n --search_timeout= search_timeout : (int) how long to search for peers for the blob\n in the dht\n --search_timeout= blob_timeout : (int) how long to try downloading from a peer\n\nReturns:\n (dict) {\n 'is_available': bool ,\n 'did_decode': bool ,\n 'did_resolve': bool ,\n 'is_stream': bool ,\n 'num_blobs_in_stream': int ,\n 'sd_hash': str ,\n 'sd_blob_availability': dict see `blob_availability`,\n 'head_blob_hash': str ,\n 'head_blob_availability': dict see `blob_availability`,\n 'use_upnp': bool ,\n 'upnp_redirect_is_set': bool ,\n 'error': None | str error message\n }", + "title": "stream_availability" + }, + { + "location": "/cli/#stream_cost_estimate", + "text": "Get estimated cost for a lbry stream\n\nUsage:\n stream_cost_estimate ( uri | --uri= uri ) [ size | --size= size ]\n\n\nOptions:\n --uri= uri : (str) uri to use\n --size= size : (float) stream size in bytes. 
if provided an sd blob won't be\n downloaded.\n\nReturns:\n (float) Estimated cost in lbry credits, returns None if uri is not\n resolvable", + "title": "stream_cost_estimate" + }, + { + "location": "/cli/#transaction_list", + "text": "List transactions belonging to wallet\n\nUsage:\n transaction_list\n\n\nOptions:\n None\n\nReturns:\n (list) List of transactions\n\n {\n claim_info : (list) claim info if in txn [{\n address : (str) address of claim,\n balance_delta : (float) bid amount,\n amount : (float) claim amount,\n claim_id : (str) claim id,\n claim_name : (str) claim name,\n nout : (int) nout\n }],\n abandon_info : (list) abandon info if in txn [{\n address : (str) address of abandoned claim,\n balance_delta : (float) returned amount,\n amount : (float) claim amount,\n claim_id : (str) claim id,\n claim_name : (str) claim name,\n nout : (int) nout\n }],\n confirmations : (int) number of confirmations for the txn,\n date : (str) date and time of txn,\n fee : (float) txn fee,\n support_info : (list) support info if in txn [{\n address : (str) address of support,\n balance_delta : (float) support amount,\n amount : (float) support amount,\n claim_id : (str) claim id,\n claim_name : (str) claim name,\n is_tip : (bool),\n nout : (int) nout\n }],\n timestamp : (int) timestamp,\n txid : (str) txn id,\n update_info : (list) update info if in txn [{\n address : (str) address of claim,\n balance_delta : (float) credited/debited\n amount : (float) absolute amount,\n claim_id : (str) claim id,\n claim_name : (str) claim name,\n nout : (int) nout\n }],\n value : (float) value of txn\n }", + "title": "transaction_list" + }, + { + "location": "/cli/#transaction_show", + "text": "Get a decoded transaction from a txid\n\nUsage:\n transaction_show ( txid | --txid= txid )\n\n\nOptions:\n --txid= txid : (str) txid of the transaction\n\nReturns:\n (dict) JSON formatted transaction", + "title": "transaction_show" + }, + { + "location": "/cli/#utxo_list", + "text": "List unspent transaction outputs\n\nUsage:\n utxo_list\n\n\nOptions:\n None\n\nReturns:\n (list) List of unspent transaction outputs (UTXOs)\n [\n {\n address : (str) the output address\n amount : (float) unspent amount\n height : (int) block height\n is_claim : (bool) is the tx a claim\n is_coinbase : (bool) is the tx a coinbase tx\n is_support : (bool) is the tx a support\n is_update : (bool) is the tx an update\n nout : (int) nout of the output\n txid : (str) txid of the output\n },\n ...\n ]", + "title": "utxo_list" + }, + { + "location": "/cli/#version", + "text": "Get lbry version information\n\nUsage:\n version\n\n\nOptions:\n None\n\nReturns:\n (dict) Dictionary of lbry version information\n {\n 'build': (str) build type (e.g. 
dev , rc , release ),\n 'ip': (str) remote ip, if available,\n 'lbrynet_version': (str) lbrynet_version,\n 'lbryum_version': (str) lbryum_version,\n 'lbryschema_version': (str) lbryschema_version,\n 'os_release': (str) os release string\n 'os_system': (str) os name\n 'platform': (str) platform string\n 'processor': (str) processor type,\n 'python_version': (str) python version,\n }", + "title": "version" + }, + { + "location": "/cli/#wallet_balance", + "text": "Return the balance of the wallet\n\nUsage:\n wallet_balance [ address | --address= address ] [--include_unconfirmed]\n\n\nOptions:\n --address= address : (str) If provided only the balance for this\n address will be given\n --include_unconfirmed : (bool) Include unconfirmed\n\nReturns:\n (float) amount of lbry credits in wallet", + "title": "wallet_balance" + }, + { + "location": "/cli/#wallet_decrypt", + "text": "Decrypt an encrypted wallet, this will remove the wallet password\n\nUsage:\n wallet_decrypt\n\n\nOptions:\n None\n\nReturns:\n (bool) true if wallet is decrypted, otherwise false", + "title": "wallet_decrypt" + }, + { + "location": "/cli/#wallet_encrypt", + "text": "Encrypt a wallet with a password, if the wallet is already encrypted this will update\nthe password\n\nUsage:\n wallet_encrypt ( new_password | --new_password= new_password )\n\n\nOptions:\n --new_password= new_password : (str) password string to be used for encrypting wallet\n\nReturns:\n (bool) true if wallet is decrypted, otherwise false", + "title": "wallet_encrypt" + }, + { + "location": "/cli/#wallet_is_address_mine", + "text": "Checks if an address is associated with the current wallet.\n\nUsage:\n wallet_is_address_mine ( address | --address= address )\n\n\nOptions:\n --address= address : (str) address to check\n\nReturns:\n (bool) true, if address is associated with current wallet", + "title": "wallet_is_address_mine" + }, + { + "location": "/cli/#wallet_list", + "text": "List wallet addresses\n\nUsage:\n wallet_list\n\n\nOptions:\n None\n\nReturns:\n List of wallet addresses", + "title": "wallet_list" + }, + { + "location": "/cli/#wallet_new_address", + "text": "Generate a new wallet address\n\nUsage:\n wallet_new_address\n\n\nOptions:\n None\n\nReturns:\n (str) New wallet address in base58", + "title": "wallet_new_address" + }, + { + "location": "/cli/#wallet_prefill_addresses", + "text": "Create new addresses, each containing `amount` credits\n\nUsage:\n wallet_prefill_addresses [--no_broadcast]\n ( num_addresses | --num_addresses= num_addresses )\n ( amount | --amount= amount )\n\n\nOptions:\n --no_broadcast : (bool) whether to broadcast or not\n --num_addresses= num_addresses : (int) num of addresses to create\n --amount= amount : (float) initial amount in each address\n\nReturns:\n (dict) the resulting transaction", + "title": "wallet_prefill_addresses" + }, + { + "location": "/cli/#wallet_public_key", + "text": "Get public key from wallet address\n\nUsage:\n wallet_public_key ( address | --address= address )\n\n\nOptions:\n --address= address : (str) address for which to get the public key\n\nReturns:\n (list) list of public keys associated with address.\n Could contain more than one public key if multisig.", + "title": "wallet_public_key" + }, + { + "location": "/cli/#wallet_send", + "text": "Send credits. If given an address, send credits to it. If given a claim id, send a tip\nto the owner of a claim specified by uri. 
A tip is a claim support where the recipient\nof the support is the claim address for the claim being supported.\n\nUsage:\n wallet_send ( amount | --amount= amount )\n (( address | --address= address ) | ( claim_id | --claim_id= claim_id ))\n\n\nOptions:\n --amount= amount : (float) amount of credit to send\n --address= address : (str) address to send credits to\n --claim_id= claim_id : (float) claim_id of the claim to send to tip to\n\nReturns:\n If sending to an address:\n (bool) true if payment successfully scheduled\n\n If sending a claim tip:\n (dict) Dictionary containing the result of the support\n {\n txid : (str) txid of resulting support claim\n nout : (int) nout of the resulting support claim\n fee : (float) fee paid for the transaction\n }", + "title": "wallet_send" + }, + { + "location": "/cli/#wallet_unlock", + "text": "Unlock an encrypted wallet\n\nUsage:\n wallet_unlock ( password | --password= password )\n\n\nOptions:\n --password= password : (str) password for unlocking wallet\n\nReturns:\n (bool) true if wallet is unlocked, otherwise false", + "title": "wallet_unlock" + }, + { + "location": "/cli/#wallet_unused_address", + "text": "Return an address containing no balance, will create\na new address if there is none.\n\nUsage:\n wallet_unused_address\n\n\nOptions:\n None\n\nReturns:\n (str) Unused wallet address in base58", + "title": "wallet_unused_address" + } + ] +} \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index 1455aa742..ecb6442f3 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -4,7 +4,8 @@ pages: - "API": index.md - "CLI": cli.md theme: material -site_dir: docs_build +site_dir: docs +docs_dir: docs_build google_analytics: - 'UA-60403362-1' - 'auto' diff --git a/scripts/gen_api_docs.py b/scripts/gen_api_docs.py index 0363f35ca..701aad90e 100644 --- a/scripts/gen_api_docs.py +++ b/scripts/gen_api_docs.py @@ -2,7 +2,7 @@ # Generate docs: python gen_api_docs.py # See docs: pip install mkdocs; mkdocs serve -# Push docs: mkdocs gh-deploy +# Push docs: mkdocs build import inspect import sys @@ -15,6 +15,7 @@ INDENT = " " REQD_CMD_REGEX = r"\(.*?=<(?P.*?)>\)" OPT_CMD_REGEX = r"\[.*?=<(?P.*?)>\]" CMD_REGEX = r"--.*?(?P.*?)[=,\s,<]" +DOCS_DIR = "docs_build" def _tabulate_options(_options_docstr, method, reqd_matches, opt_matches): @@ -88,7 +89,7 @@ def _doc(obj): def main(): curdir = op.dirname(op.realpath(__file__)) - api_doc_path = op.realpath(op.join(curdir, '..', 'docs', 'index.md')) + api_doc_path = op.realpath(op.join(curdir, '..', DOCS_DIR, 'index.md')) docs = '' for method_name in sorted(Daemon.callable_methods.keys()): diff --git a/scripts/gen_cli_docs.py b/scripts/gen_cli_docs.py index f6d34bcbc..7aaa0954b 100644 --- a/scripts/gen_cli_docs.py +++ b/scripts/gen_cli_docs.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- -# Generate docs: python gen_api_docs.py +# Generate docs: python gen_cli_docs.py # See docs: pip install mkdocs; mkdocs serve -# Push docs: mkdocs gh-deploy +# Push docs: mkdocs build import inspect import os.path as op @@ -11,6 +11,7 @@ from tabulate import tabulate from lbrynet.daemon.Daemon import Daemon INDENT = " " +DOCS_DIR = "docs_build" def _tabulate_options(_options_docstr, method): @@ -69,7 +70,7 @@ def _doc(obj): def main(): curdir = op.dirname(op.realpath(__file__)) - cli_doc_path = op.realpath(op.join(curdir, '..', 'docs', 'cli.md')) + cli_doc_path = op.realpath(op.join(curdir, '..', DOCS_DIR, 'cli.md')) docs = '' for method_name in sorted(Daemon.callable_methods.keys()): diff --git a/scripts/gen_docs.py 
b/scripts/gen_docs.py new file mode 100644 index 000000000..b1fb364bb --- /dev/null +++ b/scripts/gen_docs.py @@ -0,0 +1,10 @@ +import gen_cli_docs +import gen_api_docs +import os.path as op +import subprocess + +gen_cli_docs.main() +gen_api_docs.main() +cwd = op.dirname(op.realpath(__file__)) +cwd = op.realpath(op.join(cwd, "..")) +proc = subprocess.Popen("mkdocs build", cwd=cwd, shell=True) From c7bfb0a2e75f51c5144be5ec892a77b13223f244 Mon Sep 17 00:00:00 2001 From: hackrush Date: Thu, 1 Mar 2018 21:42:30 +0530 Subject: [PATCH 4/5] ImportError checking and kill process after completion --- scripts/gen_docs.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/scripts/gen_docs.py b/scripts/gen_docs.py index b1fb364bb..1a076d299 100644 --- a/scripts/gen_docs.py +++ b/scripts/gen_docs.py @@ -1,10 +1,25 @@ -import gen_cli_docs -import gen_api_docs import os.path as op import subprocess +try: + import mkdocs +except ImportError: + raise ImportError("mkdocs is not installed") + +try: + import tabulate +except ImportError: + raise ImportError("tabulate is not installed") + +try: + import gen_cli_docs + import gen_api_docs +except ImportError: + raise ImportError("Probably not inside the lbry's virtual environment or daemon not installed") + gen_cli_docs.main() gen_api_docs.main() cwd = op.dirname(op.realpath(__file__)) cwd = op.realpath(op.join(cwd, "..")) -proc = subprocess.Popen("mkdocs build", cwd=cwd, shell=True) +proc = subprocess.Popen("exec mkdocs build", cwd=cwd, shell=True) +proc.kill() From 53c5134591c53efdd222a9fa3c86a83cf7359ae0 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 6 Mar 2018 03:45:52 +0530 Subject: [PATCH 5/5] Consolidated all 3 scripts into 1, better error messages --- CHANGELOG.md | 49 +--------- scripts/gen_api_docs.py | 105 --------------------- scripts/gen_cli_docs.py | 86 ----------------- scripts/gen_docs.py | 204 ++++++++++++++++++++++++++++++++++++---- 4 files changed, 189 insertions(+), 255 deletions(-) delete mode 100644 scripts/gen_api_docs.py delete mode 100644 scripts/gen_cli_docs.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 75ea8d158..be70701b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,22 +13,6 @@ at anytime. 
* ### Fixed - * - * improper parsing of arguments to CLI settings_set (https://github.com/lbryio/lbry/issues/930) - * unnecessarily verbose exchange rate error (https://github.com/lbryio/lbry/issues/984) - * value error due to a race condition when saving to the claim cache (https://github.com/lbryio/lbry/issues/1013) - * being unable to re-download updated content (https://github.com/lbryio/lbry/issues/951) - * sending error messages for failed api requests - * file manager startup being slow when handling thousands of files - * handling decryption error for blobs encrypted with an invalid key - * handling stream with no data blob (https://github.com/lbryio/lbry/issues/905) - * fetching the external ip - * `blob_list` returning an error with --uri parameter and incorrectly returning `[]` for streams where blobs are known (https://github.com/lbryio/lbry/issues/895) - * `get` failing with a non-useful error message when given a uri for a channel claim - * exception checking in several wallet unit tests - * daemon not erring properly for non-numeric values being passed to the `bid` parameter for the `publish` method - * `publish` command to allow updating claims with a `bid` amount higher than the wallet balance, so long as the amount is less than the wallet balance plus the bid amount of the claim being updated (https://github.com/lbryio/lbry/issues/748) - * incorrect `blob_num` for the stream terminator blob, which would result in creating invalid streams. Such invalid streams are detected on startup and are automatically removed (https://github.com/lbryio/lbry/issues/1124) * fixed the inconsistencies in API and CLI docstrings * @@ -40,30 +24,11 @@ at anytime. * * ### Added - * link to instructions on how to change the default peer port - * `lbrynet-console`, a tool to run or connect to lbrynet-daemon and launch an interactive python console with the api functions built in. - * `--conf` CLI flag to specify an alternate config file - * `peer_port`, `disable_max_key_fee`, `auto_renew_claim_height_delta`, `blockchain_name`, and `lbryum_servers` to configurable settings - * `wallet_unlock` command (available during startup to unlock an encrypted wallet) - * support for wallet encryption via new commands `wallet_decrypt` and `wallet_encrypt` - * `channel_import`, `channel_export`, and `claim_renew` commands - * `blob_availability` and `stream_availability` commands for debugging download issues - * a new startup stage to indicate if the daemon is waiting for the `wallet_unlock` command. - * `abandon_info` dictionary (containing `claim_name`, `claim_id`, `address`, `amount`, `balance_delta` and `nout`) for claims, supports, and updates returned by `transaction_list` - * `permanent_url` string to `channel_list_mine`, `claim_list`, `claim_show`, `resolve` and `resolve_name` (see lbryio/lbryum#203) - * `is_mine` boolean to `channel_list` results - * `txid`, `nout`, `channel_claim_id`, `channel_claim_name`, `status`, `blobs_completed`, and `blobs_in_stream` fields to file objects returned by `file_list` and `get` - * `txid`, `nout`, `channel_claim_id`, and `channel_claim_name` filters for `file` commands (`file_list`, `file_set_status`, `file_reflect`, and `file_delete`) - * unit tests for `SQLiteStorage` and updated old tests for relevant changes (https://github.com/lbryio/lbry/issues/1088) * scripts to autogenerate documentation - * - -### Added - * * ### Removed - * + * short(single dashed) arguments for `lbrynet-cli` * @@ -139,18 +104,6 @@ at anytime. 
* old storage classes used by the file manager, wallet, and blob manager * old `.db` database files from the data directory - * `seccure` and `gmpy` dependencies - * support for positional arguments in cli `settings_set`. Now only accepts settings changes in the form `--setting_key=value` - * `auto_re_reflect` setting from the conf file, use the `reflect_uploads` setting instead - * `name` argument for `claim_show` command - * `message` response field in file objects returned by `file_list` and `get` - * `include_tip_info` argument from `transaction_list`, which will now always include tip information. - * old and unused UI related code - * unnecessary `TempBlobManager` class - * old storage classes used by the file manager, wallet, and blob manager - * old `.db` database files from the data directory - * short(single dashed) arguments for `lbrynet-cli` - ## [0.18.0] - 2017-11-08 ### Fixed * Fixed amount of close nodes to add to list in case of extension to neighbouring k-buckets diff --git a/scripts/gen_api_docs.py b/scripts/gen_api_docs.py deleted file mode 100644 index 701aad90e..000000000 --- a/scripts/gen_api_docs.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- coding: utf-8 -*- - -# Generate docs: python gen_api_docs.py -# See docs: pip install mkdocs; mkdocs serve -# Push docs: mkdocs build - -import inspect -import sys -import re -import os.path as op -from tabulate import tabulate -from lbrynet.daemon.Daemon import Daemon - -INDENT = " " -REQD_CMD_REGEX = r"\(.*?=<(?P.*?)>\)" -OPT_CMD_REGEX = r"\[.*?=<(?P.*?)>\]" -CMD_REGEX = r"--.*?(?P.*?)[=,\s,<]" -DOCS_DIR = "docs_build" - - -def _tabulate_options(_options_docstr, method, reqd_matches, opt_matches): - _option_list = [] - for line in _options_docstr.splitlines(): - if (line.strip().startswith("--")): - # separates command name and description - parts = line.split(":", 1) - - # checks whether the command is optional or required - # and remove the cli type formatting and convert to - # api style formatitng - match = re.findall(CMD_REGEX, parts[0]) - - if match[0] not in reqd_matches: - parts[0] = "'" + match[0] + "' (optional)" - else: - parts[0] = "'" + match[0] + "'" - - # separates command type(in brackets) and description - new_parts = parts[1].lstrip().split(" ", 1) - else: - parts = [line] - - # len will be 2 when there's cmd name and description - if len(parts) == 2: - _option_list.append([parts[0], ":", new_parts[0], new_parts[1]]) - # len will be 1 when there's continuation of multiline description in the next line - # check `blob_announce`'s `stream_hash` command - elif len(parts) == 1: - _option_list.append([None, None, None, parts[0]]) - else: - print "Error: Ill formatted doc string for {}".format(method) - print "Error causing line: {}".format(line) - - # tabulate to make the options look pretty - _options_docstr_no_indent = tabulate(_option_list, missingval="", tablefmt="plain") - - # tabulate to make the options look pretty - _options_docstr = "" - for line in _options_docstr_no_indent.splitlines(): - _options_docstr += INDENT + line + '\n' - - return _options_docstr - - -def _doc(obj): - docstr = (inspect.getdoc(obj) or '').strip() - - try: - _desc, _docstr_after_desc = docstr.split("Usage:", 1) - _usage_docstr, _docstr_after_options = _docstr_after_desc.split("Options:", 1) - _options_docstr, _returns_docstr = _docstr_after_options.split("Returns:", 1) - except(ValueError): - print "Error: Ill formatted doc string for {}".format(obj) - return "Error!" 
- - opt_matches = re.findall(OPT_CMD_REGEX, _usage_docstr) - reqd_matches = re.findall(REQD_CMD_REGEX, _usage_docstr) - - _options_docstr = _tabulate_options(_options_docstr.strip(), obj, reqd_matches, opt_matches) - - docstr = _desc + \ - "Args:\n" + \ - _options_docstr + \ - "\nReturns:" + \ - _returns_docstr - - return docstr - - -def main(): - curdir = op.dirname(op.realpath(__file__)) - api_doc_path = op.realpath(op.join(curdir, '..', DOCS_DIR, 'index.md')) - - docs = '' - for method_name in sorted(Daemon.callable_methods.keys()): - method = Daemon.callable_methods[method_name] - docs += '## ' + method_name + "\n\n```text\n" + _doc(method) + "\n```\n\n" - - docs = "# LBRY JSON-RPC API Documentation\n\n" + docs - with open(api_doc_path, 'w+') as f: - f.write(docs) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/scripts/gen_cli_docs.py b/scripts/gen_cli_docs.py deleted file mode 100644 index 7aaa0954b..000000000 --- a/scripts/gen_cli_docs.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- - -# Generate docs: python gen_cli_docs.py -# See docs: pip install mkdocs; mkdocs serve -# Push docs: mkdocs build - -import inspect -import os.path as op -import sys -from tabulate import tabulate -from lbrynet.daemon.Daemon import Daemon - -INDENT = " " -DOCS_DIR = "docs_build" - - -def _tabulate_options(_options_docstr, method): - _option_list = [] - for line in _options_docstr.splitlines(): - if (line.strip().startswith("--")): - # separates command name and description - parts = line.split(":", 1) - # separates command type(in brackets) and description - new_parts = parts[1].lstrip().split(" ", 1) - else: - parts = [line] - - # len will be 2 when there's cmd name and description - if len(parts) == 2: - _option_list.append([parts[0], ":", new_parts[0], new_parts[1]]) - # len will be 1 when there's continuation of multiline description in the next line - # check `blob_announce`'s `stream_hash` command - elif len(parts) == 1: - _option_list.append([None, None, None, parts[0]]) - else: - print "Error: Ill formatted doc string for {}".format(method) - print "Error causing line: {}".format(line) - - # tabulate to make the options look pretty - _options_docstr_no_indent = tabulate(_option_list, missingval="", tablefmt="plain") - - # Indent the options properly - _options_docstr = "" - for line in _options_docstr_no_indent.splitlines(): - _options_docstr += INDENT + line + '\n' - - return _options_docstr - - -def _doc(obj): - docstr = (inspect.getdoc(obj) or '').strip() - - try: - _usage_docstr, _docstr_after_options = docstr.split("Options:", 1) - _options_docstr, _returns_docstr = _docstr_after_options.split("Returns:", 1) - except(ValueError): - print "Error: Ill formatted doc string for {}".format(obj) - return "Error!" 
- - _options_docstr = _tabulate_options(_options_docstr.strip(), obj) - - docstr = _usage_docstr + \ - "\nOptions:\n" + \ - _options_docstr + \ - "\nReturns:" + \ - _returns_docstr - - return docstr - - -def main(): - curdir = op.dirname(op.realpath(__file__)) - cli_doc_path = op.realpath(op.join(curdir, '..', DOCS_DIR, 'cli.md')) - - docs = '' - for method_name in sorted(Daemon.callable_methods.keys()): - method = Daemon.callable_methods[method_name] - docs += '## ' + method_name + "\n\n```text\n" + _doc(method) + "\n```\n\n" - - docs = "# LBRY Command Line Documentation\n\n" + docs - with open(cli_doc_path, 'w+') as f: - f.write(docs) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/scripts/gen_docs.py b/scripts/gen_docs.py index 1a076d299..57db05c23 100644 --- a/scripts/gen_docs.py +++ b/scripts/gen_docs.py @@ -1,25 +1,197 @@ -import os.path as op +# -*- coding: utf-8 -*- + +# Generate docs: python gen_api_docs.py +# See docs: pip install mkdocs; mkdocs serve +# Push docs: mkdocs build + +import re +import inspect import subprocess +import os +from lbrynet.daemon.Daemon import Daemon try: - import mkdocs + import mkdocs except ImportError: - raise ImportError("mkdocs is not installed") + raise ImportError("mkdocs is not installed") try: - import tabulate + from tabulate import tabulate except ImportError: - raise ImportError("tabulate is not installed") + raise ImportError("tabulate is not installed") -try: - import gen_cli_docs - import gen_api_docs -except ImportError: - raise ImportError("Probably not inside the lbry's virtual environment or daemon not installed") +INDENT = " " +REQD_CMD_REGEX = r"\(.*?=<(?P.*?)>\)" +OPT_CMD_REGEX = r"\[.*?=<(?P.*?)>\]" +CMD_REGEX = r"--.*?(?P.*?)[=,\s,<]" +DOCS_DIR = "docs_build" -gen_cli_docs.main() -gen_api_docs.main() -cwd = op.dirname(op.realpath(__file__)) -cwd = op.realpath(op.join(cwd, "..")) -proc = subprocess.Popen("exec mkdocs build", cwd=cwd, shell=True) -proc.kill() + +def _cli_tabulate_options(_options_docstr, method): + _option_list = [] + for line in _options_docstr.splitlines(): + if (line.strip().startswith("--")): + # separates command name and description + parts = line.split(":", 1) + # separates command type(in brackets) and description + new_parts = parts[1].lstrip().split(" ", 1) + else: + parts = [line] + + # len will be 2 when there's cmd name and description + if len(parts) == 2: + _option_list.append([parts[0], ":", new_parts[0], new_parts[1]]) + # len will be 1 when there's continuation of multiline description in the next line + # check `blob_announce`'s `stream_hash` command + elif len(parts) == 1: + _option_list.append([None, None, None, parts[0]]) + else: + print "Error: Ill formatted doc string for {}".format(method) + print "Error causing line: {}".format(line) + + # tabulate to make the options look pretty + _options_docstr_no_indent = tabulate(_option_list, missingval="", tablefmt="plain") + + # Indent the options properly + _options_docstr = "" + for line in _options_docstr_no_indent.splitlines(): + _options_docstr += INDENT + line + '\n' + + return _options_docstr + + +def _api_tabulate_options(_options_docstr, method, reqd_matches, opt_matches): + _option_list = [] + for line in _options_docstr.splitlines(): + if (line.strip().startswith("--")): + # separates command name and description + parts = line.split(":", 1) + + # checks whether the command is optional or required + # and remove the cli type formatting and convert to + # api style formatitng + match = re.findall(CMD_REGEX, parts[0]) + + 
if match[0] not in reqd_matches: + parts[0] = "'" + match[0] + "'" + else: + parts[0] = "'" + match[0] + "' (required)" + + # separates command type(in brackets) and description + new_parts = parts[1].lstrip().split(" ", 1) + else: + parts = [line] + + # len will be 2 when there's cmd name and description + if len(parts) == 2: + _option_list.append([parts[0], ":", new_parts[0], new_parts[1]]) + # len will be 1 when there's continuation of multiline description in the next line + # check `blob_announce`'s `stream_hash` command + elif len(parts) == 1: + _option_list.append([None, None, None, parts[0]]) + else: + print "Error: Ill formatted doc string for {}".format(method) + print "Error causing line: {}".format(line) + + # tabulate to make the options look pretty + _options_docstr_no_indent = tabulate(_option_list, missingval="", tablefmt="plain") + + # tabulate to make the options look pretty + _options_docstr = "" + for line in _options_docstr_no_indent.splitlines(): + _options_docstr += INDENT + line + '\n' + + return _options_docstr + + +def _cli_doc(obj): + docstr = (inspect.getdoc(obj) or '').strip() + + try: + _usage_docstr, _docstr_after_options = docstr.split("Options:", 1) + _options_docstr, _returns_docstr = _docstr_after_options.split("Returns:", 1) + except(ValueError): + print "Error: Ill formatted doc string for {}".format(obj) + print "Please ensure that the docstring has all the three headings i.e. \"Usage:\"" + print "\"Options:\" and \"Returns:\" exactly as specified, including the colon" + return "Error!" + + try: + _options_docstr = _cli_tabulate_options(_options_docstr.strip(), obj) + except Exception as e: + print "Please make sure that the individual options are properly formatted" + print "It should be strictly of the format:" + print "--command_name= : (type) desc" + print e.message + + docstr = _usage_docstr + \ + "\nOptions:\n" + \ + _options_docstr + \ + "\nReturns:" + \ + _returns_docstr + + return docstr + + +def _api_doc(obj): + docstr = (inspect.getdoc(obj) or '').strip() + + try: + _desc, _docstr_after_desc = docstr.split("Usage:", 1) + _usage_docstr, _docstr_after_options = _docstr_after_desc.split("Options:", 1) + _options_docstr, _returns_docstr = _docstr_after_options.split("Returns:", 1) + except(ValueError): + print "Error: Ill formatted doc string for {}".format(obj) + print "Please ensure that the docstring has all the three headings i.e. \"Usage:\"" + print "\"Options:\" and \"Returns:\" exactly as specified, including the colon" + return "Error!" 
+
+    opt_matches = re.findall(OPT_CMD_REGEX, _usage_docstr)
+    reqd_matches = re.findall(REQD_CMD_REGEX, _usage_docstr)
+
+    try:
+        _options_docstr = _api_tabulate_options(_options_docstr.strip(), obj, reqd_matches, opt_matches)
+    except Exception as e:
+        print "Please make sure that the individual options are properly formatted"
+        print "It should be strictly of the format:"
+        print "--command_name= : (type) desc"
+        print e.message
+
+    docstr = _desc + \
+             "Args:\n" + \
+             _options_docstr + \
+             "\nReturns:" + \
+             _returns_docstr
+
+    return docstr
+
+
+def main():
+    curdir = os.path.dirname(os.path.realpath(__file__))
+    api_doc_path = os.path.realpath(os.path.join(curdir, '..', DOCS_DIR, 'index.md'))
+    cli_doc_path = os.path.realpath(os.path.join(curdir, '..', DOCS_DIR, 'cli.md'))
+
+    _api_docs = ''
+    _cli_docs = ''
+    for method_name in sorted(Daemon.callable_methods.keys()):
+        method = Daemon.callable_methods[method_name]
+        _api_docs += '## ' + method_name + "\n\n```text\n" + _api_doc(method) + "\n```\n\n"
+        _cli_docs += '## ' + method_name + "\n\n```text\n" + _cli_doc(method) + "\n```\n\n"
+
+    _api_docs = "# LBRY JSON-RPC API Documentation\n\n" + _api_docs
+    with open(api_doc_path, 'w+') as f:
+        f.write(_api_docs)
+
+    _cli_docs = "# LBRY Command Line Documentation\n\n" + _cli_docs
+    with open(cli_doc_path, 'w+') as f:
+        f.write(_cli_docs)
+
+
+if __name__ == '__main__':
+    cwd = os.path.dirname(os.path.realpath(__file__))
+    cwd = os.path.realpath(os.path.join(cwd, ".."))
+    directory = os.path.join(cwd, "docs_build")
+    if not os.path.exists(directory):
+        os.makedirs(directory)
+    # generate the markdown sources, then build the static site with mkdocs
+    main()
+    subprocess.check_call("mkdocs build", cwd=cwd, shell=True)
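
A minimal sketch of how the usage-line regexes in `gen_docs.py` decide whether an argument is required or optional, assuming a docstring written in the documented `(--name=<name>)` / `[--name=<name>]` convention. The named groups and the `demo_command` docstring below are illustrative assumptions; the original angle-bracket placeholders and group names do not survive in the rendered docs above.

```python
# -*- coding: utf-8 -*-
# Illustrative only: required args appear in parentheses, optional args in brackets.
import re

# Assumed reconstructions of REQD_CMD_REGEX / OPT_CMD_REGEX; the group names are arbitrary.
REQD_CMD_REGEX = r"\(.*?=<(?P<reqd>.*?)>\)"
OPT_CMD_REGEX = r"\[.*?=<(?P<opt>.*?)>\]"

# Hypothetical docstring fragment in the format the generator expects.
usage = """
Usage:
    demo_command (<name> | --name=<name>) [--count=<count>]
                 [--verbose=<verbose>]
"""

required = re.findall(REQD_CMD_REGEX, usage)   # ['name']
optional = re.findall(OPT_CMD_REGEX, usage)    # ['count', 'verbose']

print "required:", required
print "optional:", optional
```

Arguments picked up by the first pattern are the ones `_api_tabulate_options` marks as required in the generated `docs_build/index.md`; everything else is treated as optional.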