-V, --version  display the version of Wget and exit
-h, --help  print this help
-b, --background  go to background after startup
-e, --execute=COMMAND  execute a `.wgetrc'-style command
-o, --output-file=FILE  log messages to FILE
-a, --append-output=FILE  append messages to FILE
-d, --debug  print lots of debugging information
-q, --quiet  quiet (no output)
-v, --verbose  be verbose (this is the default)
-nv, --no-verbose  turn off verboseness, without being quiet
--report-speed=TYPE  output bandwidth as TYPE. TYPE can be bits
-i, --input-file=FILE  download URLs found in local or external FILE
-t, --tries=NUMBER  set number of retries to NUMBER (0 unlimits)
--retry-connrefused  retry even if connection is refused
--retry-on-host-error  consider host errors as non-fatal, transient errors
--retry-on-http-error=ERRORS  comma-separated list of HTTP errors to retry
-O, --output-document=FILE  write documents to FILE
-nc, --no-clobber  skip downloads that would download to existing files (overwriting them)
--no-netrc  don't try to obtain credentials from .netrc
-nd, --no-directories  don't create directories
-x, --force-directories  force creation of directories
-nH, --no-host-directories  don't create host directories
--protocol-directories  use protocol name in directories
-P, --directory-prefix=PREFIX  save files to PREFIX/..
--cut-dirs=NUMBER  ignore NUMBER remote directory components
--http-user=USER  set http user to USER
--http-password=PASS  set http password to PASS
--no-cache  disallow server-cached data
--default-page=NAME  change the default page name (normally this is 'index.html')
-E, --adjust-extension  save HTML/CSS documents with proper extensions
--ignore-length  ignore 'Content-Length' header field
--header=STRING  insert STRING among the headers
--secure-protocol=PR  choose secure protocol, one of auto, SSLv2, SSLv3, TLSv1, TLSv1_1, TLSv1_2, TLSv1_3 and PFS
--https-only  only follow secure HTTPS links
--no-check-certificate  don't validate the server's certificate
--certificate=FILE  client certificate file
--certificate-type=TYPE  client certificate type, PEM or DER
--private-key=FILE  private key file
--private-key-type=TYPE  private key type, PEM or DER
--ciphers=STR  Set the priority string (GnuTLS) or cipher list string (OpenSSL) directly. Use with care. This option overrides --secure-protocol
--no-hsts  disable HSTS
--hsts-file  path of HSTS database (will override default)
--ftp-user=USER  set ftp user to USER
--ftp-password=PASS  set ftp password to PASS
--no-remove-listing  don't remove '.listing' files
--no-glob  turn off FTP file name globbing
--no-passive-ftp  disable the "passive" transfer mode
--preserve-permissions  preserve remote file permissions
--retr-symlinks  when recursing, get linked-to files (not dir)
--ftps-implicit  use implicit FTPS (default port is 990)
--ftps-resume-ssl  resume the SSL/TLS session started in the control connection when opening a data connection
--ftps-clear-data-connection  cipher the control channel only; all the data will be in plaintext
--ftps-fallback-to-ftp  fall back to FTP if FTPS is not supported in the target server
--warc-file=FILENAME  save request/response data to a .warc.gz file
--warc-header=STRING  insert STRING into the warcinfo record
--warc-max-size=NUMBER  set maximum size of WARC files to NUMBER
--warc-cdx  write CDX index files
--warc-dedup=FILENAME  do not store records listed in this CDX file
--no-warc-compression  do not compress WARC files with GZIP
--no-warc-digests  do not calculate SHA1 digests
--no-warc-keep-log  do not store the log file in a WARC record
-r, --recursive  specify recursive download
-l, --level=NUMBER  maximum recursion depth (inf or 0 for infinite)
--delete-after  delete files locally after downloading them
-k, --convert-links  make links in downloaded HTML or CSS point to local files
--convert-file-only  convert the file part of the URLs only (usually known as the basename)
--backups=N  before writing file X, rotate up to N backup files
-K, --backup-converted  before converting file X, back up as X.orig
-A, --accept=LIST  comma-separated list of accepted extensions
-R, --reject=LIST  comma-separated list of rejected extensions
--accept-regex=REGEX  regex matching accepted URLs
--reject-regex=REGEX  regex matching rejected URLs
--regex-type=TYPE  regex type (posix|pcre)
-D, --domains=LIST  comma-separated list of accepted domains
--exclude-domains=LIST  comma-separated list of rejected domains
--follow-ftp  follow FTP links from HTML documents