Commit 93d495a6 authored by pm22d12's avatar pm22d12
Browse files

harmonization of the fields of the different tabs

parent 9a046ae8
......@@ -400,10 +400,10 @@ def save_data_source(file_data, submitted_data):
if parent_loc.protocol == "Https_opensearch":
if parent_sel.find("directories") is not None:
parent_sel.remove(parent_sel.directories)
elif submitted_data['dcheck_config_ignore_directories_newer_than'] \
or submitted_data['dcheck_config_ignore_directories_older_than'] \
or submitted_data['dcheck_config_ignore_directories_regexp'] \
or submitted_data['dcheck_config_force_directories_regexp']:
elif submitted_data['ignore_directories_newer_than'] \
or submitted_data['ignore_directories_older_than'] \
or submitted_data['ignore_directories_regexp'] \
or submitted_data['include_directories_regexp']:
if parent_sel.find("directories") is None:
dummy = etree.SubElement(parent_sel, "directories")
......@@ -411,28 +411,28 @@ def save_data_source(file_data, submitted_data):
dummy = etree.SubElement(
parent_sel.directories, "ignore_newer_than")
parent_sel.directories.ignore_newer_than = submitted_data[
'dcheck_config_ignore_directories_newer_than']
'ignore_directories_newer_than']
if parent_sel.directories.find("ignore_modify_time_older_than") is None:
dummy = etree.SubElement(
parent_sel.directories, "ignore_modify_time_older_than")
parent_sel.directories.ignore_modify_time_older_than = submitted_data[
'dcheck_config_ignore_directories_older_than']
'ignore_directories_older_than']
if parent_sel.directories.find("ignore_regexp") is None:
dummy = etree.SubElement(parent_sel.directories, "ignore_regexp")
parent_sel.directories.ignore_regexp = submitted_data[
'dcheck_config_ignore_directories_regexp']
'ignore_directories_regexp']
if parent_sel.directories.find("regexp") is None:
dummy = etree.SubElement(parent_sel.directories, "regexp")
parent_sel.directories.regexp = submitted_data['dcheck_config_force_directories_regexp']
parent_sel.directories.regexp = submitted_data['include_directories_regexp']
elif parent_sel.find("directories") is not None:
parent_sel.remove(parent_sel.directories)
if submitted_data['dcheck_config_ignore_modify_time_older_than'] != "" \
or submitted_data['dcheck_config_ignore_files_regexp'] != "" \
or submitted_data['dcheck_config_force_files_regexp'] != "":
if submitted_data['ignore_files_modify_older_than'] != "" \
or submitted_data['ignore_files_regexp'] != "" \
or submitted_data['include_files_regexp'] != "":
if parent_sel.find("files") is None:
dummy = etree.SubElement(parent_sel, "files")
......@@ -440,15 +440,15 @@ def save_data_source(file_data, submitted_data):
dummy = etree.SubElement(
parent_sel.files, "ignore_modify_time_older_than")
parent_sel.files.ignore_modify_time_older_than = submitted_data[
'dcheck_config_ignore_modify_time_older_than']
'ignore_files_modify_older_than']
if parent_sel.files.find("ignore_regexp") is None:
dummy = etree.SubElement(parent_sel.files, "ignore_regexp")
parent_sel.files.ignore_regexp = submitted_data['dcheck_config_ignore_files_regexp']
parent_sel.files.ignore_regexp = submitted_data['ignore_files_regexp']
if parent_sel.files.find("regexp") is None:
dummy = etree.SubElement(parent_sel.files, "regexp")
parent_sel.files.regexp = submitted_data['dcheck_config_force_files_regexp']
parent_sel.files.regexp = submitted_data['include_files_regexp']
elif parent_sel.find("files") is not None:
parent_sel.remove(parent_sel.files)
......@@ -461,15 +461,15 @@ def save_data_source(file_data, submitted_data):
if parent_sel.opensearch.find("dataset") is None:
dummy = etree.SubElement(parent_sel.opensearch, "dataset")
parent_sel.opensearch.dataset = submitted_data['dcheck_config_opensearch_dataset']
parent_sel.opensearch.dataset = submitted_data['opensearch_dataset']
if parent_sel.opensearch.find("area") is None:
dummy = etree.SubElement(parent_sel.opensearch, "area")
parent_sel.opensearch.area = submitted_data['dcheck_config_opensearch_area']
parent_sel.opensearch.area = submitted_data['opensearch_area']
if parent_sel.opensearch.find("request_format") is None:
dummy = etree.SubElement(parent_sel.opensearch, "request_format")
parent_sel.opensearch.request_format = submitted_data['dcheck_config_opensearch_format']
parent_sel.opensearch.request_format = submitted_data['opensearch_format']
def save_data_destination(file_data, submitted_data):
......
......@@ -506,74 +506,76 @@ def new_queue(request):
def help(request):
map_help = {}
map_help["id"] = _("id.help")
map_help["direction"] = _('direction.help')
map_help["checksum"] = _('checksum.help')
map_help["compression_action"] = _('compression_action.help')
map_help["compression_type"] = _('compression_type.help')
# data_reader
map_help["type_remote"] = _('type_remote.help')
map_help["server_remote"] = _('server_remote.help')
map_help["port_remote"] = _('port_remote.help')
map_help["user_remote"] = _('user_remote.help')
map_help["passwd_remote"] = _('passwd_remote.help')
map_help["repos_remote"] = _('repos_remote.help')
map_help["data_reader"] = _('data_reader.help')
map_help["regexp_date"] = _('regexp_date.help')
map_help["date_format"] = _('date_format.help')
map_help["storage_path"] = _('storage_path.help')
# listing builder
map_help["dcheck_config_default_file_accept"] = _(
'dcheck_config_default_file_accept.help')
map_help["dcheck_config_ignore_directories_regexp"] = _(
'dcheck_config_ignore_directories_regexp.help')
map_help["dcheck_config_force_files_regexp"] = _(
'dcheck_config_force_files_regexp.help')
map_help["dcheck_config_smart_crawler_delta"] = _(
'dcheck_config_smart_crawler_delta.help')
map_help["dcheck_config_smart_crawler_directories_pattern"] = _(
'dcheck_config_smart_crawler_directories_pattern.help')
map_help["dcheck_config_smart_crawler_maxdate"] = _(
'dcheck_config_smart_crawler_maxdate.help')
map_help["dcheck_config_smart_crawler_mindate"] = _(
'dcheck_config_smart_crawler_mindate.help')
map_help["dcheck_config_regexp_date"] = _('dcheck_config_regexp_date.help')
map_help["dcheck_config_date_format"] = _('dcheck_config_date_format.help')
map_help["dreport_config"] = _('dreport_config.help')
# misc_parameter
map_help["listing_delay"] = _('listing_delay.help')
map_help["wait_before_download_delay"] = _(
'wait_before_download_delay.help')
map_help["max_activated_files_by_loop"] = _(
'max_activated_files_by_loop.help')
map_help["parallel"] = _('parallel.help')
map_help["nb_retry"] = _('nb_retry.help')
map_help["redownload"] = _('redownload.help')
map_help["include_files_regexp"] = _('include_files_regexp.help')
map_help["ignore_files_regexp"] = _('ignore_files_regexp.help')
map_help["ignore_files_modify_older_than"] = _('ignore_files_modify_older_than.help')
map_help["ignore_directories_regexp"] = _('ignore_directories_regexp.help')
map_help["ignore_directories_regexp"] = _('ignore_directories_regexp.help')
map_help["ignore_directories_newer_than"] = _('ignore_directories_newer_than.help')
map_help["ignore_directories_older_than"] = _('ignore_directories_older_than.help')
map_help["directories_pattern"] = _('directories_pattern.help')
map_help["directories_delta"] = _('directories_delta.help')
map_help["directories_maxdate"] = _('directories_maxdate.help')
map_help["directories_mindate"] = _('directories_mindate.help')
map_help["opensearch_dataset"] = _('opensearch_dataset.help')
map_help["opensearch_area"] = _('opensearch_area.help')
map_help["opensearch_format"] = _('opensearch_format.help')
map_help["repos_local"] = _('repos_local.help')
map_help["type_local"] = _('type_local.help')
map_help["repos_local_tree"] = _('repos_local_tree.help')
map_help["repos_local_SAFE"] = _('repos_local_SAFE.help')
map_help["checksum"] = _('checksum.help')
map_help["compression_type"] = _('compression_type.help')
map_help["compressionSubDir"] = _('compressionSubDir.help')
map_help["extraction_type"] = _('extraction_type.help')
map_help["extractionSubDir"] = _('extractionSubDir.help')
map_help["links_path"] = _('links_path.help')
map_help["file_group_name"] = _('file_group_name.help')
map_help["loop_delay"] = _('loop_delay.help')
map_help["wait_before_download_delay"] = _('wait_before_download_delay.help')
map_help["wait_between_downloads"] = _('wait_between_downloads.help')
map_help["check_provider_before_download"] = _(
'check_provider_before_download.help')
map_help["max_activated_files_by_loop"] = _('max_activated_files_by_loop.help')
map_help["parallel"] = _('parallel.help')
map_help["nb_retry"] = _('nb_retry.help')
map_help["check_provider_before_download"] = _('check_provider_before_download.help')
map_help["monitoring_enabled"] = _('monitoring_enabled.help')
map_help["wait_between_scan"] = _('wait_between_scan.help')
map_help["purge_scan_older_than"] = _('purge_scan_older_than.help')
map_help["keep_last_scan"] = _('keep_last_scan.help')
map_help["to_download_max_nbr"] = _('to_download_max_nbr.help')
map_help["to_download_max_nbr_cg"] = _('to_download_max_nbr_cg.help')
map_help["max_activated_flow"] = _('max_activated_flow.help')
# download_policies
map_help["redownload"] = _('redownload.help')
map_help["pattern_filters_activated"] = _('pattern_filters_activated.help')
map_help["pattern_filters_default"] = _('pattern_filters_default.help')
map_help["regexp_apply"] = _('regexp_apply.help')
map_help["regexp_action"] = _('regexp_action.help')
# remote_storage
map_help["type_remote"] = _('type_remote.help')
map_help["server_remote"] = _('server_remote.help')
map_help["port_remote"] = _('port_remote.help')
map_help["user_remote"] = _('user_remote.help')
map_help["passwd_remote"] = _('passwd_remote.help')
map_help["repos_remote"] = _('repos_remote.help')
map_help["max_activated_flow"] = _('max_activated_flow.help')
# local
map_help["type_local"] = _('type_local.help')
map_help["repos_local"] = _('repos_local.help')
# data_control
map_help["to_download_max_nbr"] = _('to_download_max_nbr.help')
map_help["ignore_before"] = _('ignore_before.help')
map_help["ignore_after"] = _('ignore_after.help')
map_help["ignore_older_than"] = _('ignore_older_than.help')
return HttpResponse(map_help)
......@@ -17,236 +17,210 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
#: apps/downloader_admin/views.py:343
#: templates/adminview/editdownload.html:785
msgid "id.help"
msgstr "Download identifier. It must be unique."
#: apps/downloader_admin/views.py:344
#: templates/adminview/editdownload.html:586
msgid "direction.help"
msgstr "Direction is forced to download. Upload is not implemented."
#: templates/adminview/editdownload.html
#: apps/downloader_admin/views.py:353
#: templates/adminview/editdownload.html:410
msgid "storage_path.help"
msgstr "Format of the local storage tree, using Python meaning (see <a href='http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior'>http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior</a>)"
#: apps/downloader_admin/views.py:345
#: templates/adminview/editdownload.html:615
msgid "checksum.help"
msgstr "Activate checksum. MD5 requires an .md5 file (on provider side) for a data to be successfully downloaded"
#: apps/downloader_admin/views.py:346
#: templates/adminview/editdownload.html:725
msgid "compression_action.help"
msgstr "Activate data compression or uncompression after download. WARN : compression is not supported for the moment."
#: apps/downloader_admin/views.py:347
#: templates/adminview/editdownload.html:641
msgid "compression_type.help"
msgstr "Data compressed on provider side can be uncompressed locally after download. Choose the right uncompress method : zip, gz, bz2"
#: apps/downloader_admin/views.py:350
#: templates/adminview/editdownload.html:301
#: **** templates/adminview/editdownload.html
#: ======
#: ====== tab Source
#: ==========
msgid "type_remote.help"
msgstr "Select the data retrieval mode"
msgid "server_remote.help"
msgstr "Server name or IP address"
msgid "port_remote.help"
msgstr "Port number (only if not the default port)"
msgid "user_remote.help"
msgstr "user name (default anonymous)"
msgid "passwd_remote.help"
msgstr "password (default anonymous)"
msgid "repos_remote.help"
msgstr "Base repository data path to crawl"
#: ====== tab Date extraction
#: ==========
msgid "data_reader.help"
msgstr ""
"Metadata (datetime) extraction method is required in order to organize downloaded data under user-specific date-tree. Use regexp to fetch date from filenames, or 'No metadata extraction' to mirror the provider tree organisation (Note : for a spool local storage, all data are stored directly in the spool without any tree)"
"A metadata extraction method (date/time) is required to organize the downloaded data under a user-specific date tree.\n"
"Use regexp to retrieve the date of file names or global attribute name for files in NetCDF format \n"
"(Note: for local spool storage, all data is stored directly in the spool without a tree)."
#: apps/downloader_admin/views.py:351
#: templates/adminview/editdownload.html:379
msgid "regexp_date.help"
msgstr ""
"A python regex to match the date in the name file. The string corresponding "
"to the date must be between parenthesis"
"A python regex to match the date in the name file.\n"
"The string corresponding to the date must be between parenthesis"
#: apps/downloader_admin/views.py:352
#: templates/adminview/editdownload.html:396
msgid "date_format.help"
msgstr ""
"Format of the date in the name file. (Directive meaning : see <a href='http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior'>http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior</a>"
#: apps/downloader_admin/views.py:353
#: templates/adminview/editdownload.html:410
msgid "storage_path.help"
msgstr "Format of the local storage tree, using Python meaning (see <a href='http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior'>http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior</a>)"
#: apps/downloader_admin/views.py:356
#: templates/adminview/editdownload.html:564
msgid "dcheck_config_default_file_accept.help"
msgstr "Select all remote files available, no include regexp applied"
#: ====== tab Selection
#: ==========
msgid "redownload.help"
msgstr "If activated, each file can be downloaded several times, when its metadata are updated (size, mtime)"
#: ========== File filter
#: ================
msgid "include_files_regexp.help"
msgstr "Only files matching the following regexp will be selected for download"
msgid "ignore_files_regexp.help"
msgstr "All files matching the following regular expression will be excluded from download"
msgid "ignore_files_modify_older_than.help"
msgstr "All files whose modification date is older than the specified number of days will be excluded from the download."
#: apps/downloader_admin/views.py:357
#: templates/adminview/editdownload.html:467
msgid "dcheck_config_ignore_directories_regexp.help"
#: ========== Directory filter
#: ================
msgid "include_directories_regexp.help"
msgstr "Only directories matching the following regexp will be selected for download"
msgid "ignore_directories_regexp.help"
msgstr "Restrict remote directories to crawl using regexp"
#: apps/downloader_admin/views.py:358
#: templates/adminview/editdownload.html:448
msgid "dcheck_config_force_files_regexp.help"
msgstr "Only file matching the following regexp will be selected for download"
msgid "ignore_directories_newer_than.help"
msgstr "All folders whose modification date is more recent than the specified number of minutes"
" will be excluded from crawl."
#: apps/downloader_admin/views.py:359
#: templates/adminview/editdownload.html:486
msgid "dcheck_config_smart_crawler_delta.help"
msgstr "Number of days to crawl on provider (time is relative to downloader execution run). Try to use automatic detection of date tree to limit the remote crawl to just a few days instead of complete archive"
msgid "ignore_directories_older_than.help"
msgstr "All directories whose modification date is older than the specified number of days"
" will be excluded from the download."
#: apps/downloader_admin/views.py:360
#: templates/adminview/editdownload.html:497
msgid "dcheck_config_smart_crawler_directories_pattern.help"
msgstr "Date pattern of remote directories, to enable smart crawl based on interesting date"
#: ========== Browse advanced directories
#: ================
msgid "directories_pattern.help"
msgstr "Date pattern of remote directories, to enable smart crawl based on a date range."
#: apps/downloader_admin/views.py:361
#: templates/adminview/editdownload.html:509
msgid "dcheck_config_smart_crawler_maxdate.help"
msgstr "When smartcrawler is enabled, this option enables to limit the date we won't crawl after"
msgid "directories_delta.help"
msgstr "Number of days of backlog to be analyzed.\n"
"Limit remote scanning to a few days instead of a full archive."
#: apps/downloader_admin/views.py:362
#: templates/adminview/editdownload.html:520
msgid "dcheck_config_smart_crawler_mindate.help"
msgstr "When smartcrawler is enabled, this option enables to limit the min date we will crawl (allows to ignore older data)"
msgid "directories_maxdate.help"
msgstr "When smartcrawler is enabled, end date of crawl (default today)."
#: apps/downloader_admin/views.py:363
#: templates/adminview/editdownload.html:530
msgid "dcheck_config_regexp_date.help"
msgstr "a python regex to match the date in the name file"
msgid "directories_mindate.help"
msgstr "When smartcrawler is enabled, start date of crawl (default maxdate - backlog)."
#: apps/downloader_admin/views.py:364
#: templates/adminview/editdownload.html:542
msgid "dcheck_config_date_format.help"
msgstr "filename date format"
#: ========== Opensearch filter
#: ================
msgid "opensearch_dataset.help"
msgstr "Selection of platform, instrument, date range, data type, etc."
#: apps/downloader_admin/views.py:366
#: templates/adminview/editdownload.html:555
msgid "dreport_config.help"
msgstr "TODO help dreport_config"
msgid "opensearch_area.help"
msgstr "Geographical area on which to query the available products."
#: apps/downloader_admin/views.py:369
#: templates/adminview/editdownload.html:798
msgid "listing_delay.help"
msgstr "Time between two crawler runs in seconds"
msgid "opensearch_format.help"
msgstr "Presentation options"
#: apps/downloader_admin/views.py:370
#: templates/adminview/editdownload.html:812
msgid "wait_before_download_delay.help"
msgstr "Time to wait before starting a file download after detection (to avoid downloading incomplete files on provider side)"
#: apps/downloader_admin/views.py:371
#: templates/adminview/editdownload.html:841
msgid "max_activated_files_by_loop.help"
msgstr "DO NOT MODIFY UNLESS YOU ARE SURE YOU NEED IT"
#: ====== tab Destination
#: ==========
msgid "repos_local.help"
msgstr "Local path where downloaded data will be stored"
#: apps/downloader_admin/views.py:372
#: templates/adminview/editdownload.html:854
msgid "parallel.help"
msgstr "Number of files to download in parallel when possible"
msgid "type_local.help"
msgstr "Select the local storage mode (organized archive or all in one folder)"
#: apps/downloader_admin/views.py:373
#: templates/adminview/editdownload.html:866
msgid "nb_retry.help"
msgstr "Number of tries to download a file when an error occurs"
msgid "repos_local_tree.help"
msgstr "Pattern of the date subfolders used to organize the data files,"
" as a python date / time format string, when type is datatree"
#: apps/downloader_admin/views.py:374
#: templates/adminview/editdownload.html:878
msgid "loop_delay.help"
msgstr "DO NOT MODIFY UNLESS YOU ARE SURE YOU NEED IT"
msgid "repos_local_SAFE.help"
msgstr "Creating a subdirectory containing the SAFE data directory"
#: apps/downloader_admin/views.py:375
#: templates/adminview/editdownload.html:826
msgid "wait_between_downloads.help"
msgstr "Time to wait between two files to download (to limit network load)"
#: apps/downloader_admin/views.py:376
#: templates/adminview/editdownload.html:888
msgid "check_provider_before_download.help"
msgstr "Assert that the provider is available before starting crawl loop"
#: ====== tab Post processing
#: ==========
msgid "checksum.help"
msgstr "Activate checksum. MD5/SHA256 requires an .md5/.sha256 file (on provider side). SAFE requires an xfdumanifest.xml file (included in the SAFE data format) "
#: apps/downloader_admin/views.py:379
#: templates/adminview/editdownload.html:909
msgid "redownload.help"
msgstr "If activated, each file can be downloaded several times, when its metadata are updated (size, mtime)"
msgid "compression_type.help"
msgstr "Enable decompression of compressed files after downloading.\n"
"Choose the right decompression method: zip, gz, bz2 and z"
#: apps/downloader_admin/views.py:380
#: templates/adminview/editdownload.html:925
msgid "pattern_filters_activated.help"
msgstr "TODO help pattern_filters_activated"
msgid "compressionSubDir.help"
msgstr "Adding a subdirectory as a container for uncompressed files."
#: apps/downloader_admin/views.py:381
msgid "pattern_filters_default.help"
msgstr "TODO help pattern_filters_default"
msgid "extraction_type.help"
msgstr "Enable extraction of 'TAR' archives after download (and after a possible compression)."
#: apps/downloader_admin/views.py:382
#: templates/adminview/editdownload.html:999
msgid "regexp_apply.help"
msgstr "TODO help regexp_apply"
msgid "extractionSubDir.help"
msgstr "Adding a subdirectory as a container for extracted files."
#: apps/downloader_admin/views.py:383
#: templates/adminview/editdownload.html:1057
msgid "regexp_action.help"
msgstr "TODO help "
msgid "links_path.help"
msgstr "Complementary spool directory containing links to downloaded files\n"
"If there are multiple directories, separate them with a ';'"
#: apps/downloader_admin/views.py:386 templates/adminview/editdownload.html:81
#: templates/adminview/editdownload.html:183
msgid "type_remote.help"
msgstr "Select the data retrieval mode"
msgid "file_group_name.help"
msgstr "Change the user group of a file.\n"
"Indicate the name of the new user group"
#: apps/downloader_admin/views.py:387
#: templates/adminview/editdownload.html:194
#: templates/adminview/editdownload.html:197
msgid "server_remote.help"
msgstr "Server name or IP address"
#: ====== tab Advanced
#: ==========
msgid "port_remote.help"
msgstr "Port number (only if not the default port)"
#: ========== Misc parameters
#: ================
#: apps/downloader_admin/views.py:366
#: templates/adminview/editdownload.html:555
msgid "loop_delay.help"
msgstr "DO NOT MODIFY UNLESS YOU ARE SURE YOU NEED IT"
#: apps/downloader_admin/views.py:388
#: templates/adminview/editdownload.html:208
msgid "user_remote.help"
msgstr "user name (default anonymous)"
msgid "wait_before_download_delay.help"
msgstr "Time to wait before starting a file download after detection (to avoid downloading incomplete files on provider side)"
#: apps/downloader_admin/views.py:389
#: templates/adminview/editdownload.html:219
msgid "passwd_remote.help"
msgstr "password (default anonymous)"
msgid "wait_between_downloads.help"
msgstr "Time to wait between two files to download (to limit network load)"
#: apps/downloader_admin/views.py:390
#: templates/adminview/editdownload.html:241
msgid "repos_remote.help"
msgstr "Base repository data path to crawl"
msgid "max_activated_files_by_loop.help"
msgstr "DO NOT MODIFY UNLESS YOU ARE SURE YOU NEED IT"
#: apps/downloader_admin/views.py:391
#: templates/adminview/editdownload.html:1152
msgid "max_activated_flow.help"
msgstr ""
"It defines the maximum of connection on server. (excepted 2 connections for "
"check provider) "
msgid "parallel.help"
msgstr "Number of files to download in parallel when possible"
#: apps/downloader_admin/views.py:394
#: templates/adminview/editdownload.html:254
msgid "type_local.help"
msgstr "Select the local storage mode (organized archive or all in one folder)"
msgid "nb_retry.help"
msgstr "Number of tries to download a file when an error occurs"
#: apps/downloader_admin/views.py:395
#: templates/adminview/editdownload.html:288
msgid "repos_local.help"
msgstr "Local path where downloaded data will be stored"
msgid "check_provider_before_download.help"
msgstr "Assert that the provider is available before starting crawl loop"
#: apps/downloader_admin/views.py:398
#: templates/adminview/editdownload.html:1095
msgid "to_download_max_nbr.help"
msgstr "Used to validate the listing of data to download generated after a remote crawl"
msgid "monitoring_enabled.help"
msgstr "Enable / disable sending monitoring information"
#: apps/downloader_admin/views.py:399
#: templates/adminview/editdownload.html:1108
msgid "ignore_before.help"
msgstr "Used to validate the listing of data to download generated after a remote crawl"
#: apps/downloader_admin/views.py:400
#: templates/adminview/editdownload.html:1122
msgid "ignore_after.help"
msgstr "Used to validate the listing of data to download generated after a remote crawl"
#: ========== Scanning / listing parameters
#: ================
msgid "wait_between_scan.help"
msgstr "Time between two crawler runs in seconds"
msgid "purge_scan_older_than.help"
msgstr "Number of days of conservation of scans"
#: apps/downloader_admin/views.py:401
#: templates/adminview/editdownload.html:1136
msgid "ignore_older_than.help"
msgid "keep_last_scan.help"
msgstr "Preserve or not the most recent scan"