curl 6.5.2 release commit
diff --git a/CHANGES b/CHANGES
index 7026b9d..a207dda 100644
--- a/CHANGES
+++ b/CHANGES
@@ -6,6 +6,14 @@
 
                                History of Changes
 
+Version 6.5.2
+
+Daniel (21 March 2000):
+- Paul Harrington <paul@pizza.org> quickly pointed out to me that 6.5.1
+  crashes hard. I upload 6.5.2 now as quickly as possible! The problem was
+  the -D adjustments in src/main.c, see also a separate 6.5.1-patch on the
+  web site.
+
 Version 6.5.1
 
 Daniel (20 March 2000):
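
The changelog entry above blames the 6.5.1 crash on the -D/--dump-header adjustments in src/main.c. As an annotation, here is a minimal, hypothetical C sketch of that kind of option handling; the struct, field and function names are invented for illustration and this is not the code from src/main.c. The point is only the general shape: -D names a file, the file is opened for writing, and the caller has to cope both with the option never being given (a NULL filename) and with fopen() failing.

    /* Hypothetical sketch only, not the actual src/main.c code. */
    #include <stdio.h>

    struct config {
      char *headerfile;  /* filename given with -D/--dump-header, or NULL */
    };

    static FILE *open_headerdump(const struct config *cfg)
    {
      if(!cfg->headerfile)
        return NULL;                       /* -D was not used at all */
      return fopen(cfg->headerfile, "wb"); /* may return NULL on failure */
    }

    int main(void)
    {
      struct config cfg = { "headers.txt" }; /* as if "-D headers.txt" was given */
      FILE *dump = open_headerdump(&cfg);
      if(dump)
        fclose(dump);
      return 0;
    }
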
diff --git a/configure.in b/configure.in
index ad24414..ad07a61 100644
--- a/configure.in
+++ b/configure.in
@@ -2,7 +2,7 @@
 dnl Process this file with autoconf to produce a configure script.
 AC_INIT(lib/urldata.h)
 AM_CONFIG_HEADER(config.h src/config.h)
-AM_INIT_AUTOMAKE(curl,"6.5.1")
+AM_INIT_AUTOMAKE(curl,"6.5.2")
 
 dnl Checks for programs.
 AC_PROG_CC
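
Editorial note on the version bump: with the automake macros of the time, the second argument to AM_INIT_AUTOMAKE becomes the package version, so after ./configure the generated config.h is expected to carry defines along these lines (a sketch of generated output under that assumption, not a file in the commit). The library's own version string in include/curl/curl.h is bumped separately in the next hunk.

    /* Sketch of what the regenerated config.h should contain, assuming
     * automake's standard PACKAGE/VERSION handling. */
    #define PACKAGE "curl"
    #define VERSION "6.5.2"
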
diff --git a/include/curl/curl.h b/include/curl/curl.h
index 6533a57..8a27c3e 100644
--- a/include/curl/curl.h
+++ b/include/curl/curl.h
@@ -418,7 +418,7 @@
 char *curl_version(void);
 
 /* This is the version number */
-#define LIBCURL_VERSION "6.5"
+#define LIBCURL_VERSION "6.5.2"
 
 /* linked-list structure for QUOTE */
 struct curl_slist {
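
This hunk raises the compile-time LIBCURL_VERSION string, which still said "6.5" in the 6.5.1 tree, to "6.5.2". A minimal sketch of how an application can report it alongside curl_version(), the run-time call declared just above the define in the context shown:

    /* Minimal example: print the libcurl version the program was built
     * against (the compile-time string patched above) and the version of
     * the library actually linked at run time. */
    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      printf("compiled against libcurl %s\n", LIBCURL_VERSION);
      printf("running with %s\n", curl_version());
      return 0;
    }
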
diff --git a/lib/Makefile.in b/lib/Makefile.in
index 2b6f6c6..84be952 100644
--- a/lib/Makefile.in
+++ b/lib/Makefile.in
@@ -77,7 +77,7 @@
 noinst_LIBRARIES = libcurl.a
 
 # Some flags needed when trying to cause warnings ;-)
-CFLAGS = -g #-Wall -pedantic
+#CFLAGS = -g -Wall -pedantic
 
 INCLUDES = -I$(top_srcdir)/include
 
@@ -99,6 +99,7 @@
 speedcheck.o getdate.o download.o ldap.o ssluse.o version.o getenv.o \
 escape.o mprintf.o telnet.o getpass.o netrc.o writeout.o
 AR = ar
+CFLAGS = @CFLAGS@
 COMPILE = $(CC) $(DEFS) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
 CCLD = $(CC)
 LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(LDFLAGS) -o $@
diff --git a/src/hugehelp.c b/src/hugehelp.c
index 04783b8..dde2486 100644
--- a/src/hugehelp.c
+++ b/src/hugehelp.c
@@ -9,693 +9,725 @@
 "                            | (__| |_| |  _ <| |___ \n"
 "                             \\___|\\___/|_| \\_\\_____|\n"
 "NAME\n"
-"     curl - get a URL with FTP, TELNET, LDAP, GOPHER, DICT, FILE,\n"
-"     HTTP or HTTPS syntax.\n"
+"       curl  -  get  a  URL with FTP, TELNET, LDAP, GOPHER, DICT,\n"
+"       FILE, HTTP or HTTPS syntax.\n"
 "\n"
 "SYNOPSIS\n"
-"     curl [options] url\n"
+"       curl [options] url\n"
 "\n"
 "DESCRIPTION\n"
-"     curl is a client to get documents/files from servers,  using\n"
-"     any  of  the supported protocols. The command is designed to\n"
-"     work without user interaction or any kind of  interactivity.\n"
+"       curl is a client  to  get  documents/files  from  servers,\n"
+"       using  any  of  the  supported  protocols.  The command is\n"
+"       designed to work without user interaction or any  kind  of\n"
+"       interactivity.\n"
 "\n"
-"     curl  offers  a busload of useful tricks like proxy support,\n"
-"     user authentication, ftp upload,  HTTP  post,  SSL  (https:)\n"
-"     connections, cookies, file transfer resume and more.\n"
+"       curl offers a busload of useful tricks like proxy support,\n"
+"       user authentication, ftp upload, HTTP post,  SSL  (https:)\n"
+"       connections, cookies, file transfer resume and more.\n"
 "\n"
 "URL\n"
-"     The URL syntax is protocol dependent. You'll find a detailed\n"
-"     description in RFC 2396.\n"
+"       The  URL  syntax  is  protocol  dependent.  You'll  find a\n"
+"       detailed description in RFC 2396.\n"
 "\n"
-"     You can specify multiple URLs or parts of  URLs  by  writing\n"
-"     part sets within braces as in:\n"
+"       You can specify multiple URLs or parts of URLs by  writing\n"
+"       part sets within braces as in:\n"
 "\n"
-"      http://site.{one,two,three}.com\n"
+"        http://site.{one,two,three}.com\n"
 "\n"
-"     or  you can get sequences of alphanumeric series by using []\n"
-"     as in:\n"
+"       or  you  can get sequences of alphanumeric series by using\n"
+"       [] as in:\n"
 "\n"
-"      ftp://ftp.numericals.com/file[1-100].txt\n"
-"      ftp://ftp.numericals.com/file[001-100].txt    (with leading\n"
-"     zeros)\n"
-"      ftp://ftp.letters.com/file[a-z].txt\n"
+"        ftp://ftp.numericals.com/file[1-100].txt\n"
+"        ftp://ftp.numericals.com/file[001-100].txt    (with lead-\n"
+"       ing zeros)\n"
+"        ftp://ftp.letters.com/file[a-z].txt\n"
 "\n"
-"     It  is possible to specify up to 9 sets or series for a URL,\n"
-"     but no nesting is supported at the moment:\n"
+"       It  is  possible  to  specify up to 9 sets or series for a\n"
+"       URL, but no nesting is supported at the moment:\n"
 "\n"
-"      http://www.any.org/archive[1996-1999]/vol­\n"
-"     ume[1-4]part{a,b,c,index}.html\n"
+"        http://www.any.org/archive[1996-1999]/vol-\n"
+"       ume[1-4]part{a,b,c,index}.html\n"
 "\n"
 "OPTIONS\n"
-"     -a/--append\n"
-"          (FTP) When used in a ftp upload, this will tell curl to\n"
-"          append to the target file instead of overwriting it. If\n"
-"          the file doesn't exist, it will be created.\n"
+"       -a/--append\n"
+"              (FTP)  When  used  in  a ftp upload, this will tell\n"
+"              curl to append to the target file instead of  over-\n"
+"              writing  it.  If the file doesn't exist, it will be\n"
+"              created.\n"
 "\n"
-"     -A/--user-agent <agent string>\n"
-"          (HTTP)  Specify  the  User-Agent  string to send to the\n"
-"          HTTP server. Some badly done CGIs fail if its  not  set\n"
-"          to \"Mozilla/4.0\".  To encode blanks in the string, sur­\n"
-"          round the string with single  quote  marks.   This  can\n"
-"          also be set with the -H/--header flag of course.\n"
-"     -b/--cookie <name=data>\n"
-"          (HTTP) Pass the data to the HTTP server as a cookie. It\n"
-"          is supposedly the data  previously  received  from  the\n"
-"          server  in a \"Set-Cookie:\" line.  The data should be in\n"
-"          the format \"NAME1=VALUE1; NAME2=VALUE2\".\n"
+"       -A/--user-agent <agent string>\n"
+"              (HTTP) Specify the User-Agent string to send to the\n"
+"              HTTP  server.  Some badly done CGIs fail if its not\n"
+"              set to \"Mozilla/4.0\".   To  encode  blanks  in  the\n"
+"              string,  surround  the  string  with  single  quote\n"
+"              marks.  This can also be set with  the  -H/--header\n"
+"              flag of course.\n"
+"       -b/--cookie <name=data>\n"
+"              (HTTP)  Pass  the  data  to  the  HTTP  server as a\n"
+"              cookie.  It  is  supposedly  the  data   previously\n"
+"              received  from  the server in a \"Set-Cookie:\" line.\n"
+"              The data should be  in  the  format  \"NAME1=VALUE1;\n"
+"              NAME2=VALUE2\".\n"
 "\n"
-"          If no '=' letter is used in the line, it is treated  as\n"
-"          a  filename  to  use  to  read previously stored cookie\n"
-"          lines from, which should be used  in  this  session  if\n"
-"          they  match.  Using  this  method  also  activates  the\n"
-"          \"cookie parser\" which will make  curl  record  incoming\n"
-"          cookies too, which may be handy if you're using this in\n"
-"          combination with the  -L/--location  option.  The  file\n"
-"          format of the file to read cookies from should be plain\n"
-"          HTTP headers or the netscape cookie file format.\n"
+"              If no '=' letter is used in the line, it is treated\n"
+"              as a filename to  use  to  read  previously  stored\n"
+"              cookie  lines  from,  which  should be used in this\n"
+"              session if they match. Using this method also acti-\n"
+"              vates  the  \"cookie  parser\"  which  will make curl\n"
+"              record incoming cookies too, which may be handy  if\n"
+"              you're   using   this   in   combination  with  the\n"
+"              -L/--location option. The file format of  the  file\n"
+"              to  read  cookies from should be plain HTTP headers\n"
+"              or the netscape cookie file format.\n"
 "\n"
-"          NOTE that the file specified with -b/--cookie  is  only\n"
-"          used  as  input. No cookies will be stored in the file.\n"
-"          To store cookies, save the HTTP headers to a file using\n"
-"          -D/--dump-header!\n"
+"              NOTE that the file specified  with  -b/--cookie  is\n"
+"              only  used  as  input. No cookies will be stored in\n"
+"              the file. To store cookies, save the  HTTP  headers\n"
+"              to a file using -D/--dump-header!\n"
 "\n"
-"     -B/--ftp-ascii\n"
-"          (FTP/LDAP)  Use ASCII transfer when getting an FTP file\n"
-"          or LDAP info. For FTP, this can  also  be  enforced  by\n"
-"          using an URL that ends with \";type=A\".\n"
+"       -B/--ftp-ascii\n"
+"              (FTP/LDAP)  Use  ASCII transfer when getting an FTP\n"
+"              file or LDAP  info.  For  FTP,  this  can  also  be\n"
+"              enforced  by using an URL that ends with \";type=A\".\n"
 "\n"
-"     -c/--continue\n"
-"          Continue/Resume   a   previous   file   transfer.  This\n"
-"          instructs curl to continue appending data on  the  file\n"
-"          where  it  was  previously  left, possibly because of a\n"
-"          broken connection to the server. There must be a  named\n"
-"          physical  file  to  append  to for this to work.  Note:\n"
-"          Upload resume is depening on a command named  SIZE  not\n"
-"          always present in all ftp servers! Upload resume is for\n"
-"          FTP only.  HTTP resume is only possible  with  HTTP/1.1\n"
-"          or later servers.\n"
+"       -c/--continue\n"
+"              Continue/Resume  a  previous  file  transfer.  This\n"
+"              instructs  curl  to  continue appending data on the\n"
+"              file where it was previously left, possibly because\n"
+"              of a broken connection to the server. There must be\n"
+"              a named physical file to  append  to  for  this  to\n"
+"              work.  Note: Upload resume is depening on a command\n"
+"              named SIZE not always present in all  ftp  servers!\n"
+"              Upload resume is for FTP only.  HTTP resume is only\n"
+"              possible with HTTP/1.1 or later servers.\n"
 "\n"
-"     -C/--continue-at <offset>\n"
-"          Continue/Resume  a  previous file transfer at the given\n"
-"          offset. The given offset is the exact number  of  bytes\n"
-"          that  will be skipped counted from the beginning of the\n"
-"          source file before it is transfered to the destination.\n"
-"          If  used with uploads, the ftp server command SIZE will\n"
-"          not be used by curl. Upload resume  is  for  FTP  only.\n"
-"          HTTP  resume  is  only  possible with HTTP/1.1 or later\n"
-"          servers.\n"
+"       -C/--continue-at <offset>\n"
+"              Continue/Resume a previous  file  transfer  at  the\n"
+"              given  offset. The given offset is the exact number\n"
+"              of bytes that will  be  skipped  counted  from  the\n"
+"              beginning  of  the  source file before it is trans-\n"
+"              fered to the destination.  If  used  with  uploads,\n"
+"              the  ftp  server  command  SIZE will not be used by\n"
+"              curl. Upload resume is for FTP only.   HTTP  resume\n"
+"              is only possible with HTTP/1.1 or later servers.\n"
 "\n"
-"     -d/--data <data>\n"
-"          (HTTP) Sends the specified data in a  POST  request  to\n"
-"          the  HTTP server. Note that the data is sent exactly as\n"
-"          specified  with  no  extra  processing.   The  data  is\n"
-"          expected  to  be \"url-encoded\". This will cause curl to\n"
-"          pass the data to  the  server  using  the  content-type\n"
-"          application/x-www-form-urlencoded. Compare to -F.\n"
+"       -d/--data <data>\n"
+"              (HTTP)  Sends  the specified data in a POST request\n"
+"              to the HTTP server. Note  that  the  data  is  sent\n"
+"              exactly as specified with no extra processing.  The\n"
+"              data is expected to  be  \"url-encoded\".  This  will\n"
+"              cause curl to pass the data to the server using the\n"
+"              content-type     application/x-www-form-urlencoded.\n"
+"              Compare to -F.\n"
 "\n"
-"          If  you  start  the  data  with  the letter @, the rest\n"
-"          should be a file name to read the data from,  or  -  if\n"
-"          you  want  curl  to read the data from stdin.  The con­\n"
-"          tents of the file must already be url-encoded.\n"
+"              If  you  start the data with the letter @, the rest\n"
+"              should be a file name to read the data from,  or  -\n"
+"              if  you want curl to read the data from stdin.  The\n"
+"              contents of the file must already be url-encoded.\n"
 "\n"
-"     -D/--dump-header <file>\n"
-"          (HTTP/FTP) Write the HTTP headers to this  file.  Write\n"
-"          the FTP file info to this file if -I/--head is used.\n"
+"       -D/--dump-header <file>\n"
+"              (HTTP/FTP) Write the HTTP  headers  to  this  file.\n"
+"              Write  the  FTP file info to this file if -I/--head\n"
+"              is used.\n"
 "\n"
-"          This  option is handy to use when you want to store the\n"
-"          cookies that a HTTP site  sends  to  you.  The  cookies\n"
-"          could then be read in a second curl invoke by using the\n"
-"          -b/--cookie option!\n"
+"              This option is handy to use when you want to  store\n"
+"              the  cookies  that  a  HTTP  site sends to you. The\n"
+"              cookies could then be read in a second curl  invoke\n"
+"              by using the -b/--cookie option!\n"
 "\n"
-"     -e/--referer <URL>\n"
-"          (HTTP) Sends the \"Referer Page\" information to the HTTP\n"
-"          server. Some badly done CGIs fail if it's not set. This\n"
-"          can also be set with the -H/--header flag of course.\n"
+"       -e/--referer <URL>\n"
+"              (HTTP)  Sends the \"Referer Page\" information to the\n"
+"              HTTP server. Some badly done CGIs fail if it's  not\n"
+"              set. This can also be set with the -H/--header flag\n"
+"              of course.\n"
 "\n"
-"     -E/--cert <certificate[:password]>\n"
-"          (HTTPS) Tells curl to  use  the  specified  certificate\n"
-"          file  when  getting  a file with HTTPS. The certificate\n"
-"          must be in PEM format.  If the optional password  isn't\n"
-"          specified, it will be queried for on the terminal. Note\n"
-"          that this certificate is the private key and  the  pri­\n"
-"          vate certificate concatenated!\n"
+"       -E/--cert <certificate[:password]>\n"
+"              (HTTPS) Tells curl to use the specified certificate\n"
+"              file  when  getting a file with HTTPS. The certifi-\n"
+"              cate must be in PEM format.  If the optional  pass-\n"
+"              word isn't specified, it will be queried for on the\n"
+"              terminal. Note that this certificate is the private\n"
+"              key and the private certificate concatenated!\n"
 "\n"
-"     -f/--fail\n"
-"          (HTTP)  Fail  silently  (no  output  at  all) on server\n"
-"          errors. This is mostly done like this to better  enable\n"
-"          scripts  etc  to  better  deal with failed attempts. In\n"
-"          normal cases when a HTTP server fails to deliver a doc­\n"
-"          ument,  it  returns  a  HTML document stating so (which\n"
-"          often also describes why and more). This flag will pre­\n"
-"          vent  curl  from  outputting  that  and  fail  silently\n"
-"          instead.\n"
+"       -f/--fail\n"
+"              (HTTP)  Fail  silently (no output at all) on server\n"
+"              errors. This is mostly done  like  this  to  better\n"
+"              enable  scripts  etc  to  better  deal  with failed\n"
+"              attempts. In normal cases when a HTTP server  fails\n"
+"              to  deliver  a document, it returns a HTML document\n"
+"              stating so (which  often  also  describes  why  and\n"
+"              more).  This flag will prevent curl from outputting\n"
+"              that and fail silently instead.\n"
 "\n"
-"     -F/--form <name=content>\n"
-"          (HTTP) This lets curl emulate a filled in form in which\n"
-"          a  user has pressed the submit button. This causes curl\n"
-"          to POST data using the content-type multipart/form-data\n"
-"          according  to RFC1867. This enables uploading of binary\n"
-"          files etc. To force the 'content' part to be read  from\n"
-"          a  file,  prefix the file name with an @ sign. Example,\n"
-"          to send your password file to the server, where  'pass­\n"
-"          word'   is   the   name  of  the  form-field  to  which\n"
-"          /etc/passwd will be the input:\n"
+"       -F/--form <name=content>\n"
+"              (HTTP) This lets curl emulate a filled in  form  in\n"
+"              which  a  user  has pressed the submit button. This\n"
+"              causes curl to POST  data  using  the  content-type\n"
+"              multipart/form-data   according  to  RFC1867.  This\n"
+"              enables uploading of binary files etc. To force the\n"
+"              'content'  part  to be read from a file, prefix the\n"
+"              file name with an @ sign.  Example,  to  send  your\n"
+"              password  file  to  the server, where 'password' is\n"
+"              the name of the  form-field  to  which  /etc/passwd\n"
+"              will be the input:\n"
+"              curl -F password=@/etc/passwd www.mypasswords.com\n"
 "\n"
-"          curl -F password=@/etc/passwd www.mypasswords.com\n"
-"          To read the file's content from stdin insted of a file,\n"
-"          use - where the file name should've been.\n"
+"              To  read  the file's content from stdin insted of a\n"
+"              file, use - where the file name should've been.\n"
 "\n"
-"     -h/--help\n"
-"          Usage help.\n"
+"       -h/--help\n"
+"              Usage help.\n"
 "\n"
-"     -H/--header <header>\n"
-"          (HTTP) Extra header to use when getting a web page. You\n"
-"          may specify any number of extra headers. Note  that  if\n"
-"          you  should  add a custom header that has the same name\n"
-"          as one of the internal ones curl would use, your exter­\n"
-"          nally  set  header will be used instead of the internal\n"
-"          one. This allows you to make even trickier  stuff  than\n"
-"          curl  would  normally do. You should not replace inter­\n"
-"          nally set headers without knowing perfectly  well  what\n"
-"          you're doing.\n"
+"       -H/--header <header>\n"
+"              (HTTP) Extra header to use when getting a web page.\n"
+"              You  may  specify any number of extra headers. Note\n"
+"              that if you should add a custom header that has the\n"
+"              same  name  as  one of the internal ones curl would\n"
+"              use,  your  externally  set  header  will  be  used\n"
+"              instead  of  the  internal  one. This allows you to\n"
+"              make even trickier stuff than curl  would  normally\n"
+"              do.  You  should not replace internally set headers\n"
+"              without knowing perfectly well what you're doing.\n"
 "\n"
-"     -i/--include\n"
-"          (HTTP) Include the HTTP-header in the output. The HTTP-\n"
-"          header includes things like server-name,  date  of  the\n"
-"          document, HTTP-version and more...\n"
+"       -i/--include\n"
+"              (HTTP) Include the HTTP-header in the  output.  The\n"
+"              HTTP-header  includes things like server-name, date\n"
+"              of the document, HTTP-version and more...\n"
 "\n"
-"     -I/--head\n"
-"          (HTTP/FTP)  Fetch  the  HTTP-header  only! HTTP-servers\n"
-"          feature the command HEAD which this uses to get nothing\n"
-"          but  the header of a document. When used on a FTP file,\n"
-"          curl displays the file size only.\n"
+"       -I/--head\n"
+"              (HTTP/FTP) Fetch the HTTP-header only! HTTP-servers\n"
+"              feature  the  command  HEAD  which this uses to get\n"
+"              nothing but the header of a document. When used  on\n"
+"              a FTP file, curl displays the file size only.\n"
 "\n"
-"     -K/--config <config file>\n"
-"          Specify which config file to read curl arguments  from.\n"
-"          The  config  file  is a text file in which command line\n"
-"          arguments can be written which then will be used as  if\n"
-"          they  were  written  on the actual command line. If the\n"
-"          first column of a config line is a '#'  character,  the\n"
-"          rest of the line will be treated as a comment.\n"
+"       -K/--config <config file>\n"
+"              Specify  which  config  file to read curl arguments\n"
+"              from. The config file is a text file in which  com-\n"
+"              mand  line arguments can be written which then will\n"
+"              be used as if they were written on the actual  com-\n"
+"              mand  line. If the first column of a config line is\n"
+"              a '#' character, the  rest  of  the  line  will  be\n"
+"              treated as a comment.\n"
 "\n"
-"          Specify  the filename as '-' to make curl read the file\n"
-"          from stdin.\n"
+"              Specify  the  filename as '-' to make curl read the\n"
+"              file from stdin.\n"
 "\n"
-"     -l/--list-only\n"
-"          (FTP) When listing an FTP directory, this switch forces\n"
-"          a  name-only  view.   Especially  useful if you want to\n"
-"          machine-parse the contents of an  FTP  directory  since\n"
-"          the  normal  directory view doesn't use a standard look\n"
-"          or format.\n"
+"       -l/--list-only\n"
+"              (FTP) When listing an FTP  directory,  this  switch\n"
+"              forces  a name-only view.  Especially useful if you\n"
+"              want to machine-parse the contents of an FTP direc-\n"
+"              tory  since the normal directory view doesn't use a\n"
+"              standard look or format.\n"
 "\n"
-"     -L/--location\n"
-"          (HTTP/HTTPS) If the server reports that  the  requested\n"
-"          page  has  a  different  location  (indicated  with the\n"
-"          header line Location:) this flag will let curl  attempt\n"
-"          to reattempt the get on the new place. If used together\n"
-"          with -i or -I, headers from all requested pages will be\n"
-"          shown.\n"
+"       -L/--location\n"
+"              (HTTP/HTTPS)  If  the  server  reports   that   the\n"
+"              requested  page has a different location (indicated\n"
+"              with the header line Location:) this flag will  let\n"
+"              curl attempt to reattempt the get on the new place.\n"
+"              If used together with -i or -I,  headers  from  all\n"
+"              requested pages will be shown.\n"
 "\n"
-"     -m/--max-time <seconds>\n"
-"          Maximum time in seconds that you allow the whole opera­\n"
-"          tion to take.  This is useful for preventing your batch\n"
-"          jobs  from  hanging  for  hours due to slow networks or\n"
-"          links going down.  This doesn't work properly in  win32\n"
-"          systems.\n"
+"       -m/--max-time <seconds>\n"
+"              Maximum  time  in  seconds that you allow the whole\n"
+"              operation to take.  This is useful  for  preventing\n"
+"              your  batch jobs from hanging for hours due to slow\n"
+"              networks or links going down.   This  doesn't  work\n"
+"              properly in win32 systems.\n"
 "\n"
-"     -M/--manual\n"
-"          Manual. Display the huge help text.\n"
+"       -M/--manual\n"
+"              Manual. Display the huge help text.\n"
 "\n"
-"     -n/--netrc\n"
-"          Makes  curl  scan  the  .netrc  file in the user's home\n"
-"          directory for login name and password.  This  is  typi­\n"
-"          cally  used  for  ftp  on unix. If used with http, curl\n"
-"          will  enable  user  authentication.  See  netrc(4)  for\n"
-"          details  on  the file format. Curl will not complain if\n"
-"          that file hasn't the right permissions (it  should  not\n"
-"          be  world nor group readable). The environment variable\n"
-"          \"HOME\" is used to find the home directory.\n"
+"       -n/--netrc\n"
+"              Makes  curl scan the .netrc file in the user's home\n"
+"              directory for login name and password. This is typ-\n"
+"              ically  used  for  ftp  on unix. If used with http,\n"
+"              curl will enable user authentication. See  netrc(5)\n"
+"              for  details on the file format. Curl will not com-\n"
+"              plain if that file hasn't the right permissions (it\n"
+"              should  not be world nor group readable). The envi-\n"
+"              ronment variable \"HOME\" is used to  find  the  home\n"
+"              directory.\n"
 "\n"
-"          A quick and very simple  example  of  how  to  setup  a\n"
-"          .netrc   to   allow   curl   to   ftp  to  the  machine\n"
-"          host.domain.com with user name\n"
+"              A  quick  and very simple example of how to setup a\n"
+"              .netrc  to  allow  curl  to  ftp  to  the   machine\n"
+"              host.domain.com with user name\n"
 "\n"
-"          machine host.domain.com user myself password secret\n"
+"              machine host.domain.com user myself password secret\n"
 "\n"
-"     -N/--no-buffer\n"
-"          Disables the buffering of the output stream. In  normal\n"
-"          work situations, curl will use a standard buffered out­\n"
-"          put stream that will have the effect that it will  out­\n"
-"          put  the  data  in chunks, not necessarily exactly when\n"
-"          the data arrives.  Using this option will disable  that\n"
-"          buffering.\n"
+"       -N/--no-buffer\n"
+"              Disables the buffering of  the  output  stream.  In\n"
+"              normal  work  situations,  curl will use a standard\n"
+"              buffered output stream that will  have  the  effect\n"
+"              that  it will output the data in chunks, not neces-\n"
+"              sarily exactly when the data arrives.   Using  this\n"
+"              option will disable that buffering.\n"
 "\n"
-"     -o/--output <file>\n"
-"          Write  output  to  <file> instead of stdout. If you are\n"
-"          using {} or [] to fetch multiple documents, you can use\n"
-"          #[num]  in  the <file> specifier. That variable will be\n"
-"          replaced with the current  string  for  the  URL  being\n"
-"          fetched. Like in:\n"
+"       -o/--output <file>\n"
+"              Write  output  to  <file> instead of stdout. If you\n"
+"              are using {} or [] to fetch multiple documents, you\n"
+"              can  use  '#'  followed  by  a number in the <file>\n"
+"              specifier. That variable will be replaced with  the\n"
+"              current string for the URL being fetched. Like in:\n"
 "\n"
-"            curl http://{one,two}.site.com -o \"file_#1.txt\"\n"
+"                curl http://{one,two}.site.com -o \"file_#1.txt\"\n"
 "\n"
-"          or use several variables like:\n"
+"              or use several variables like:\n"
+"\n"
+"                curl http://{site,host}.host[1-5].com -o \"#1_#2\"\n"
+"\n"
+"       -O/--remote-name\n"
+"              Write  output to a local file named like the remote\n"
+"              file we get. (Only the file part of the remote file\n"
+"              is used, the path is cut off.)\n"
+"\n"
+"       -P/--ftpport <address>\n"
+"              (FTP)  Reverses  the  initiator/listener roles when\n"
+"              connecting with ftp. This switch makes Curl use the\n"
+"              PORT  command  instead  of  PASV. In practice, PORT\n"
+"              tells the server to connect to the client's  speci-\n"
+"              fied  address  and port, while PASV asks the server\n"
+"              for an ip address and port to connect to. <address>\n"
+"              should be one of:\n"
 "\n"
-"            curl http://{site,host}.host[1-5].com -o \"#1_#2\"\n"
+"              interface   i.e \"eth0\" to specify which interface's\n"
+"                          IP address you want to use  (Unix only)\n"
 "\n"
-"     -O/--remote-name\n"
-"          Write output to a local file named like the remote file\n"
-"          we get. (Only the file part of the remote file is used,\n"
-"          the path is cut off.)\n"
+"              IP address  i.e  \"192.168.10.1\" to specify exact IP\n"
+"                          number\n"
 "\n"
-"     -P/--ftpport <address>\n"
-"          (FTP)  Reverses  the initiator/listener roles when con­\n"
-"          necting with ftp. This switch makes Curl use  the  PORT\n"
-"          command  instead  of  PASV. In practice, PORT tells the\n"
-"          server to connect to the client's specified address and\n"
-"          port,  while PASV asks the server for an ip address and\n"
-"          port to connect to. <address> should be one of:\n"
+"              host name   i.e \"my.host.domain\" to specify machine\n"
 "\n"
-"          interface   i.e \"eth0\" to specify which interface's  IP\n"
-"                      address you want to use  (Unix only)\n"
+"              -           (any  single-letter  string) to make it\n"
+"                          pick the machine's default\n"
 "\n"
-"          IP address  i.e \"192.168.10.1\" to specify exact IP num­\n"
-"                      ber\n"
+"       -q     If used as the first parameter on the command line,\n"
+"              the $HOME/.curlrc file will not be read and used as\n"
+"              a config file.\n"
 "\n"
-"          host name   i.e \"my.host.domain\" to specify machine\n"
+"       -Q/--quote <comand>\n"
+"              (FTP) Send an arbitrary command to the  remote  FTP\n"
+"              server,  by  using the QUOTE command of the server.\n"
+"              Not all servers support this command, and  the  set\n"
+"              of  QUOTE  commands are server specific! Quote com-\n"
+"              mands are sent BEFORE the transfer is taking place.\n"
+"              To  make  commands  take  place  after a successful\n"
+"              transfer, prefix them with  a  dash  '-'.  You  may\n"
+"              specify any amount of commands to be run before and\n"
+"              after the transfer. If the server  returns  failure\n"
+"              for  one of the commands, the entire operation will\n"
+"              be aborted.\n"
 "\n"
-"          -           (any single-letter string) to make it  pick\n"
-"                      the machine's default\n"
+"       -r/--range <range>\n"
+"              (HTTP/FTP) Retrieve a byte  range  (i.e  a  partial\n"
+"              document) from a HTTP/1.1 or FTP server. Ranges can\n"
+"              be specified in a number of ways.\n"
 "\n"
-"     -q   If used as the first parameter on the command line, the\n"
-"          $HOME/.curlrc file will not be read and used as a  con­\n"
-"          fig file.\n"
+"              0-499     specifies the first 500 bytes\n"
 "\n"
-"     -Q/--quote <comand>\n"
-"          (FTP)  Send  an  arbitrary  command  to  the remote FTP\n"
-"          server, by using the QUOTE command of the  server.  Not\n"
-"          all  servers support this command, and the set of QUOTE\n"
-"          commands are server specific! Quote commands  are  sent\n"
-"          BEFORE  the  transfer is taking place. To make commands\n"
-"          take place after a  successful  transfer,  prefix  them\n"
-"          with a dash '-'. You may specify any amount of commands\n"
-"          to be run before and after the transfer. If the  server\n"
-"          returns  failure  for  one  of the commands, the entire\n"
-"          operation will be aborted.\n"
+"              500-999   specifies the second 500 bytes\n"
 "\n"
-"     -r/--range <range>\n"
-"          (HTTP/FTP) Retrieve a byte range (i.e a  partial  docu­\n"
-"          ment)  from  a  HTTP/1.1  or  FTP server. Ranges can be\n"
-"          specified in a number of ways.\n"
+"              -500      specifies the last 500 bytes\n"
 "\n"
-"          0-499     specifies the first 500 bytes\n"
+"              9500      specifies the bytes from offset 9500  and\n"
+"                        forward\n"
 "\n"
-"          500-999   specifies the second 500 bytes\n"
+"              0-0,-1    specifies   the   first   and  last  byte\n"
+"                        only(*)(H)\n"
 "\n"
-"          -500      specifies the last 500 bytes\n"
+"              500-700,600-799\n"
+"                        specifies 300 bytes from offset 500(H)\n"
 "\n"
-"          9500      specifies the bytes from offset 9500 and for­\n"
-"                    ward\n"
+"              100-199,500-599\n"
+"                        specifies   two   separate   100    bytes\n"
+"                        ranges(*)(H)\n"
 "\n"
-"          0-0,-1    specifies the first and last byte only(*)(H)\n"
-"          500-700,600-799\n"
-"                    specifies 300 bytes from offset 500(H)\n"
+"       (*) = NOTE that this will cause the server to reply with a\n"
+"       multipart response!\n"
 "\n"
-"          100-199,500-599\n"
-"                    specifies two separate 100 bytes ranges(*)(H)\n"
+"       You should also be aware that many HTTP/1.1 servers do not\n"
+"       have this feature enabled, so that when you attempt to get\n"
+"       a range, you'll instead get the whole document.\n"
 "\n"
-"     (*) = NOTE that this will cause the server to reply  with  a\n"
-"     multipart response!\n"
+"       FTP range downloads only support the simple syntax 'start-\n"
+"       stop'  (optionally  with  one  of the numbers omitted). It\n"
+"       depends on the non-RFC command SIZE.\n"
 "\n"
-"     You  should  also be aware that many HTTP/1.1 servers do not\n"
-"     have this feature enabled, so that when you attempt to get a\n"
-"     range, you'll instead get the whole document.\n"
+"       -s/--silent\n"
+"              Silent mode. Don't show  progress  meter  or  error\n"
+"              messages.  Makes Curl mute.\n"
 "\n"
-"     FTP  range  downloads only support the simple syntax 'start-\n"
-"     stop' (optionally with  one  of  the  numbers  omitted).  It\n"
-"     depends on the non-RFC command SIZE.\n"
+"       -S/--show-error\n"
+"              When  used with -s it makes curl show error message\n"
+"              if it fails.\n"
 "\n"
-"     -s/--silent\n"
-"          Silent  mode.  Don't  show progress meter or error mes­\n"
-"          sages.  Makes Curl mute.\n"
+"       -t/--upload\n"
+"              Transfer the stdin data to the specified file. Curl\n"
+"              will read everything from stdin until EOF and store\n"
+"              with the supplied  name.  If  this  is  used  on  a\n"
+"              http(s) server, the PUT command will be used.\n"
 "\n"
-"     -S/--show-error\n"
-"          When used with -s it makes curl show error  message  if\n"
-"          it fails.\n"
+"       -T/--upload-file <file>\n"
+"              Like  -t,  but  this  transfers the specified local\n"
+"              file. If there is no file  part  in  the  specified\n"
+"              URL,  Curl  will  append  the local file name. NOTE\n"
+"              that you must use a trailing / on the  last  direc-\n"
+"              tory  to really prove to Curl that there is no file\n"
+"              name or curl will think that  your  last  directory\n"
+"              name is the remote file name to use. That will most\n"
+"              likely cause the upload operation to fail. If  this\n"
+"              is  used  on a http(s) server, the PUT command will\n"
+"              be used.\n"
 "\n"
-"     -t/--upload\n"
-"          Transfer  the  stdin  data  to the specified file. Curl\n"
-"          will read everything from stdin  until  EOF  and  store\n"
-"          with  the  supplied  name. If this is used on a http(s)\n"
-"          server, the PUT command will be used.\n"
+"       -u/--user <user:password>\n"
+"              Specify user and password to use when fetching. See\n"
+"              README.curl  for  detailed  examples  of how to use\n"
+"              this. If no password is specified,  curl  will  ask\n"
+"              for it interactively.\n"
 "\n"
-"     -T/--upload-file <file>\n"
-"          Like -t, but this transfers the specified  local  file.\n"
-"          If  there  is  no  file part in the specified URL, Curl\n"
-"          will append the local file name. NOTE that you must use\n"
-"          a  trailing  / on the last directory to really prove to\n"
-"          Curl that there is no file name or curl will think that\n"
-"          your  last  directory  name  is the remote file name to\n"
-"          use. That will most likely cause the  upload  operation\n"
-"          to  fail.  If this is used on a http(s) server, the PUT\n"
-"          command will be used.\n"
+"       -U/--proxy-user <user:password>\n"
+"              Specify   user   and  password  to  use  for  Proxy\n"
+"              authentication. If no password is  specified,  curl\n"
+"              will ask for it interactively.\n"
 "\n"
-"     -u/--user <user:password>\n"
-"          Specify user and password to  use  when  fetching.  See\n"
-"          README.curl  for  detailed examples of how to use this.\n"
-"          If no password is  specified,  curl  will  ask  for  it\n"
-"          interactively.\n"
+"       -v/--verbose\n"
+"              Makes  the  fetching more verbose/talkative. Mostly\n"
+"              usable for debugging. Lines starting with '>' means\n"
+"              data  sent by curl, '<' means data received by curl\n"
+"              that is hidden in normal cases and  lines  starting\n"
+"              with '*' means additional info provided by curl.\n"
 "\n"
-"     -U/--proxy-user <user:password>\n"
-"          Specify  user and password to use for Proxy authentica­\n"
-"          tion. If no password is specified, curl will ask for it\n"
-"          interactively.\n"
-"     -v/--verbose\n"
-"          Makes   the  fetching  more  verbose/talkative.  Mostly\n"
-"          usable for debugging. Lines  starting  with  '>'  means\n"
-"          data sent by curl, '<' means data received by curl that\n"
-"          is hidden in normal cases and lines starting  with  '*'\n"
-"          means additional info provided by curl.\n"
+"       -V/--version\n"
+"              Displays  the  full  version  of  curl, libcurl and\n"
+"              other 3rd party  libraries  linked  with  the  exe-\n"
+"              cutable.\n"
 "\n"
-"     -V/--version\n"
-"          Displays  the  full  version of curl, libcurl and other\n"
-"          3rd party libraries linked with the executable.\n"
+"       -w/--write-out <format>\n"
+"              Defines  what to display after a completed and suc-\n"
+"              cessful operation. The format is a string that  may\n"
+"              contain  plain  text mixed with any number of vari-\n"
+"              ables. The string can be specified as \"string\",  to\n"
+"              get  read  from  a  particular  file you specify it\n"
+"              \"@filename\" and to tell curl  to  read  the  format\n"
+"              from stdin you write \"@-\".\n"
 "\n"
-"     -w/--write-out <format>\n"
-"          Defines what to display after a completed and  success­\n"
-"          ful  operation. The format is a string that may contain\n"
-"          plain text mixed with  any  number  of  variables.  The\n"
-"          string can be specified as \"string\", to get read from a\n"
-"          particular file you specify it \"@filename\" and to  tell\n"
-"          curl to read the format from stdin you write \"@-\".\n"
+"              The  variables present in the output format will be\n"
+"              substituted by the value or text that  curl  thinks\n"
+"              fit,  as  described below. All variables are speci-\n"
+"              fied like %{variable_name} and to output a normal %\n"
+"              you  just write them like %%. You can output a new-\n"
+"              line by using \\n, a carrige return with  \\r  and  a\n"
+"              tab space with \\t.\n"
 "\n"
-"          The variables present in the output format will be sub­\n"
-"          stituted by the value or text that curl thinks fit,  as\n"
-"          described  below.  All  variables  are  specified  like\n"
-"          %{variable_name} and to output  a  normal  %  you  just\n"
-"          write  them  like %%. You can output a newline by using\n"
-"          \\n, a carrige return with \\r and a tab space with \\t.\n"
+"              NOTE:  The  %-letter  is  a  special  letter in the\n"
+"              win32-environment, where all occurrences of %  must\n"
+"              be doubled when using this option.\n"
 "\n"
-"          NOTE:  The  %-letter  is  a  special  letter   in   the\n"
-"          win32-environment,  where  all occurrences of % must be\n"
-"          doubled when using this option.\n"
+"              Available variables are at this point:\n"
 "\n"
-"          Available variables are at this point:\n"
+"              url_effective  The  URL that was fetched last. This\n"
+"                             is mostly meaningful if you've  told\n"
+"                             curl to follow location: headers.\n"
 "\n"
-"          url_effective  The URL that was fetched last.  This  is\n"
-"                         mostly meaningful if you've told curl to\n"
-"                         follow location: headers.\n"
+"              http_code      The numerical code that was found in\n"
+"                             the last retrieved HTTP(S) page.\n"
 "\n"
-"          http_code      The numerical code that was found in the\n"
-"                         last retrieved HTTP(S) page.\n"
+"              time_total     The total time, in seconds, that the\n"
+"                             full operation lasted. The time will\n"
+"                             be displayed with millisecond  reso-\n"
+"                             lution.\n"
 "\n"
-"          time_total     The  total  time,  in  seconds, that the\n"
-"                         full operation lasted. The time will  be\n"
-"                         displayed with millisecond resolution.\n"
+"              time_namelookup\n"
+"                             The  time,  in seconds, it took from\n"
+"                             the start until the  name  resolving\n"
+"                             was completed.\n"
+"              time_connect   The  time,  in seconds, it took from\n"
+"                             the start until the connect  to  the\n"
+"                             remote  host  (or  proxy)  was  com-\n"
+"                             pleted.\n"
 "\n"
-"          time_namelookup\n"
-"                         The  time,  in seconds, it took from the\n"
-"                         start until the name resolving was  com­\n"
-"                         pleted.\n"
+"              time_pretransfer\n"
+"                             The time, in seconds, it  took  from\n"
+"                             the start until the file transfer is\n"
+"                             just about to begin.  This  includes\n"
+"                             all  pre-transfer commands and nego-\n"
+"                             tiations that are  specific  to  the\n"
+"                             particular protocol(s) involved.\n"
 "\n"
-"          time_connect   The  time,  in seconds, it took from the\n"
-"                         start until the connect  to  the  remote\n"
-"                         host (or proxy) was completed.\n"
-"          time_pretransfer\n"
-"                         The  time,  in seconds, it took from the\n"
-"                         start until the file  transfer  is  just\n"
-"                         about  to  begin. This includes all pre-\n"
-"                         transfer commands and negotiations  that\n"
-"                         are  specific  to  the particular proto­\n"
-"                         col(s) involved.\n"
+"              size_download  The  total amount of bytes that were\n"
+"                             downloaded.\n"
 "\n"
-"          size_download  The total  amount  of  bytes  that  were\n"
-"                         downloaded.\n"
+"              size_upload    The total amount of bytes that  were\n"
+"                             uploaded.\n"
 "\n"
-"          size_upload    The  total  amount  of  bytes  that were\n"
-"                         uploaded.\n"
+"              speed_download The average download speed that curl\n"
+"                             measured for the complete  download.\n"
 "\n"
-"          speed_download The average  download  speed  that  curl\n"
-"                         measured for the complete download.\n"
+"              speed_upload   The  average  upload speed that curl\n"
+"                             measured for the complete  download.\n"
 "\n"
-"          speed_upload   The  average upload speed that curl mea­\n"
-"                         sured for the complete download.\n"
+"       -x/--proxy <proxyhost[:port]>\n"
+"              Use  specified  proxy.  If  the  port number is not\n"
+"              specified, it is assumed at port 1080.\n"
 "\n"
-"     -x/--proxy <proxyhost[:port]>\n"
-"          Use specified proxy. If the port number is  not  speci­\n"
-"          fied, it is assumed at port 1080.\n"
+"       -X/--request <command>\n"
+"              (HTTP) Specifies a custom request to use when  com-\n"
+"              municating  with  the  HTTP  server.  The specified\n"
+"              request will be used instead of the  standard  GET.\n"
+"              Read  the  HTTP  1.1  specification for details and\n"
+"              explanations.\n"
 "\n"
-"     -X/--request <command>\n"
-"          (HTTP)  Specifies a custom request to use when communi­\n"
-"          cating with the HTTP  server.   The  specified  request\n"
-"          will be used instead of the standard GET. Read the HTTP\n"
-"          1.1 specification for details and explanations.\n"
+"              (FTP) Specifies a custom FTP command to use instead\n"
+"              of LIST when doing file lists with ftp.\n"
 "\n"
-"          (FTP) Specifies a custom FTP command to use instead  of\n"
-"          LIST when doing file lists with ftp.\n"
+"       -y/--speed-time <time>\n"
+"              If  a download is slower than speed-limit bytes per\n"
+"              second during a  speed-time  period,  the  download\n"
+"              gets  aborted.  If  speed-time is used, the default\n"
+"              speed-limit will be 1 unless set with -y.\n"
 "\n"
-"     -y/--speed-time <time>\n"
-"          If a download is slower than speed-limit bytes per sec­\n"
-"          ond during  a  speed-time  period,  the  download  gets\n"
-"          aborted. If speed-time is used, the default speed-limit\n"
-"          will be 1 unless set with -y.\n"
+"       -Y/--speed-limit <speed>\n"
+"              If a download is slower than this given  speed,  in\n"
+"              bytes  per  second,  for speed-time seconds it gets\n"
+"              aborted. speed-time is set with -Y and is 30 if not\n"
+"              set.\n"
 "\n"
-"     -Y/--speed-limit <speed>\n"
-"          If a download is slower than this given speed, in bytes\n"
-"          per  second,  for  speed-time  seconds it gets aborted.\n"
-"          speed-time is set with -Y and is 30 if not set.\n"
+"       -z/--time-cond <date expression>\n"
+"              (HTTP) Request to get a file that has been modified\n"
+"              later than the given time and date, or one that has\n"
+"              been modified before that time. The date expression\n"
+"              can be all sorts of date strings or if  it  doesn't\n"
+"              match  any  internal ones, it tries to get the time\n"
+"              from a given file name instead! See the GNU date(1)\n"
+"              man page for date expression details.\n"
 "\n"
-"     -z/--time-cond <date expression>\n"
-"          (HTTP) Request to get a file  that  has  been  modified\n"
-"          later  than  the  given  time and date, or one that has\n"
-"          been modified before that time. The date expression can\n"
-"          be all sorts of date strings or if it doesn't match any\n"
-"          internal ones, it tries to get the time  from  a  given\n"
-"          file  name  instead!  See  the GNU date(1) man page for\n"
-"          date expression details.\n"
-"          Start the date expression with a dash (-)  to  make  it\n"
-"          request  for  a  document  that is older than the given\n"
-"          date/time, default is a document that is newer than the\n"
-"          specified date/time.\n"
+"              Start  the  date expression with a dash (-) to make\n"
+"              it request for a document that is  older  than  the\n"
+"              given  date/time,  default  is  a  document that is\n"
+"              newer than the specified date/time.\n"
 "\n"
-"     -3/--sslv3\n"
-"          (HTTPS) Forces curl to use SSL version 3 when negotiat­\n"
-"          ing with a remote SSL server.\n"
+"       -3/--sslv3\n"
+"              (HTTPS) Forces curl to use SSL version 3 when nego-\n"
+"              tiating with a remote SSL server.\n"
 "\n"
-"     -2/--sslv2\n"
-"          (HTTPS) Forces curl to use SSL version 2 when negotiat­\n"
-"          ing with a remote SSL server.\n"
+"       -2/--sslv2\n"
+"              (HTTPS) Forces curl to use SSL version 2 when nego-\n"
+"              tiating with a remote SSL server.\n"
 "\n"
-"     -#/--progress-bar\n"
-"          Make  curl  display  progress information as a progress\n"
-"          bar instead of the default statistics.\n"
+"       -#/--progress-bar\n"
+"              Make  curl  display  progress  information   as   a\n"
+"              progress bar instead of the default statistics.\n"
 "\n"
-"     --crlf\n"
-"          (FTP) Convert LF to CRLF  in  upload.  Useful  for  MVS\n"
-"          (OS/390).\n"
+"       --crlf (FTP)  Convert LF to CRLF in upload. Useful for MVS\n"
+"              (OS/390).\n"
 "\n"
-"     --stderr <file>\n"
-"          Redirect  all  writes  to  stderr to the specified file\n"
-"          instead. If the file name is a plain '-', it is instead\n"
-"          written to stdout. This option has no point when you're\n"
-"          using a shell with decent redirecting capabilities.\n"
+"       --stderr <file>\n"
+"              Redirect all writes to stderr to the specified file\n"
+"              instead.  If  the  file  name is a plain '-', it is\n"
+"              instead written to stdout. This option has no point\n"
+"              when  you're  using a shell with decent redirecting\n"
+"              capabilities.\n"
 "\n"
 "FILES\n"
-"     ~/.curlrc\n"
-"          Default config file.\n"
+"       ~/.curlrc\n"
+"              Default config file.\n"
 "\n"
 "ENVIRONMENT\n"
-"     HTTP_PROXY [protocol://]<host>[:port]\n"
-"          Sets proxy server to use for HTTP.\n"
+"       HTTP_PROXY [protocol://]<host>[:port]\n"
+"              Sets proxy server to use for HTTP.\n"
 "\n"
-"     HTTPS_PROXY [protocol://]<host>[:port]\n"
-"          Sets proxy server to use for HTTPS.\n"
+"       HTTPS_PROXY [protocol://]<host>[:port]\n"
+"              Sets proxy server to use for HTTPS.\n"
 "\n"
-"     FTP_PROXY [protocol://]<host>[:port]\n"
-"          Sets proxy server to use for FTP.\n"
+"       FTP_PROXY [protocol://]<host>[:port]\n"
+"              Sets proxy server to use for FTP.\n"
 "\n"
-"     GOPHER_PROXY [protocol://]<host>[:port]\n"
-"          Sets proxy server to use for GOPHER.\n"
+"       GOPHER_PROXY [protocol://]<host>[:port]\n"
+"              Sets proxy server to use for GOPHER.\n"
 "\n"
-"     ALL_PROXY [protocol://]<host>[:port]\n"
-"          Sets proxy server to use if no protocol-specific  proxy\n"
-"          is set.\n"
+"       ALL_PROXY [protocol://]<host>[:port]\n"
+"              Sets proxy server to use  if  no  protocol-specific\n"
+"              proxy is set.\n"
+"       NO_PROXY <comma-separated list of hosts>\n"
+"              list  of  host  names that shouldn't go through any\n"
+"              proxy. If set to a asterisk '*'  only,  it  matches\n"
+"              all hosts.\n"
 "\n"
-"     NO_PROXY <comma-separated list of hosts>\n"
-"          list of host names that shouldn't go through any proxy.\n"
-"          If set to a asterisk '*' only, it matches all hosts.\n"
-"     COLUMNS <integer>\n"
-"          The width of the terminal.  This variable only  affects\n"
-"          curl when the --progress-bar option is used.\n"
+"       COLUMNS <integer>\n"
+"              The  width  of  the  terminal.   This variable only\n"
+"              affects curl  when  the  --progress-bar  option  is\n"
+"              used.\n"
 "\n"
 "EXIT CODES\n"
-"     There exists a bunch of different error codes and their cor­\n"
-"     responding error messages that may appear during bad  condi­\n"
-"     tions. At the time of this writing, the exit codes are:\n"
+"       There  exists  a  bunch of different error codes and their\n"
+"       corresponding error messages that may  appear  during  bad\n"
+"       conditions.  At  the  time of this writing, the exit codes\n"
+"       are:\n"
 "\n"
-"     1    Unsupported protocol. This build of curl has no support\n"
-"          for this protocol.\n"
+"       1      Unsupported protocol. This build  of  curl  has  no\n"
+"              support for this protocol.\n"
 "\n"
-"     2    Failed to initialize.\n"
+"       2      Failed to initialize.\n"
 "\n"
-"     3    URL malformat. The syntax was not correct.\n"
+"       3      URL malformat. The syntax was not correct.\n"
 "\n"
-"     4    URL user malformatted. The user-part of the URL  syntax\n"
-"          was not correct.\n"
+"       4      URL  user  malformatted.  The  user-part of the URL\n"
+"              syntax was not correct.\n"
 "\n"
-"     5    Couldn't  resolve proxy. The given proxy host could not\n"
-"          be resolved.\n"
+"       5      Couldn't resolve proxy. The given proxy host  could\n"
+"              not be resolved.\n"
 "\n"
-"     6    Couldn't resolve host. The given remote  host  was  not\n"
-"          resolved.\n"
+"       6      Couldn't  resolve  host.  The given remote host was\n"
+"              not resolved.\n"
 "\n"
-"     7    Failed to connect to host.\n"
+"       7      Failed to connect to host.\n"
 "\n"
-"     8    FTP  weird  server  reply.  The  server  sent data curl\n"
-"          couldn't parse.\n"
+"       8      FTP weird server reply. The server sent  data  curl\n"
+"              couldn't parse.\n"
 "\n"
-"     9    FTP access denied. The server denied login.\n"
+"       9      FTP access denied. The server denied login.\n"
 "\n"
-"     10   FTP user/password incorrect. Either one  or  both  were\n"
-"          not accepted by the server.\n"
+"       10     FTP  user/password  incorrect.  Either  one or both\n"
+"              were not accepted by the server.\n"
 "\n"
-"     11   FTP  weird  PASS  reply.  Curl couldn't parse the reply\n"
-"          sent to the PASS request.\n"
+"       11     FTP weird PASS reply. Curl couldn't parse the reply\n"
+"              sent to the PASS request.\n"
 "\n"
-"     12   FTP weird USER reply. Curl  couldn't  parse  the  reply\n"
-"          sent to the USER request.\n"
+"       12     FTP weird USER reply. Curl couldn't parse the reply\n"
+"              sent to the USER request.\n"
 "\n"
-"     13   FTP  weird  PASV  reply,  Curl couldn't parse the reply\n"
-"          sent to the PASV request.\n"
+"       13     FTP weird PASV reply, Curl couldn't parse the reply\n"
+"              sent to the PASV request.\n"
 "\n"
-"     14   FTP weird 227 formay. Curl couldn't parse the  227-line\n"
-"          the server sent.\n"
+"       14     FTP  weird  227  formay.  Curl  couldn't  parse the\n"
+"              227-line the server sent.\n"
+"       15     FTP can't get host. Couldn't resolve the host IP we\n"
+"              got in the 227-line.\n"
 "\n"
-"     15   FTP can't get host. Couldn't resolve the host IP we got\n"
-"          in the 227-line.\n"
+"       16     FTP  can't  reconnect. Couldn't connect to the host\n"
+"              we got in the 227-line.\n"
 "\n"
-"     16   FTP can't reconnect. Couldn't connect to  the  host  we\n"
-"          got in the 227-line.\n"
-"     17   FTP  couldn't  set  binary.  Couldn't  change  transfer\n"
-"          method to binary.\n"
+"       17     FTP couldn't set binary. Couldn't  change  transfer\n"
+"              method to binary.\n"
 "\n"
-"     18   Partial file. Only a part of the file was transfered.\n"
+"       18     Partial  file.  Only  a part of the file was trans-\n"
+"              fered.\n"
 "\n"
-"     19   FTP couldn't RETR file. The RETR command failed.\n"
+"       19     FTP couldn't RETR file. The RETR command failed.\n"
 "\n"
-"     20   FTP write error. The transfer was reported bad  by  the\n"
-"          server.\n"
+"       20     FTP write error. The transfer was reported  bad  by\n"
+"              the server.\n"
 "\n"
-"     21   FTP  quote  error.  A quote command returned error from\n"
-"          the server.\n"
+"       21     FTP  quote  error.  A  quote command returned error\n"
+"              from the server.\n"
 "\n"
-"     22   HTTP not found. The requested page was not found.  This\n"
-"          return code only appears if --fail is used.\n"
+"       22     HTTP not found. The requested page was  not  found.\n"
+"              This return code only appears if --fail is used.\n"
 "\n"
-"     23   Write  error.  Curl  couldn't  write  data  to  a local\n"
-"          filesystem or similar.\n"
+"       23     Write  error.  Curl  couldn't write data to a local\n"
+"              filesystem or similar.\n"
 "\n"
-"     24   Malformat user. User name badly specified.\n"
+"       24     Malformat user. User name badly specified.\n"
 "\n"
-"     25   FTP couldn't STOR file.  The  server  denied  the  STOR\n"
-"          operation.\n"
+"       25     FTP couldn't STOR file. The server denied the  STOR\n"
+"              operation.\n"
 "\n"
-"     26   Read error. Various reading problems.\n"
+"       26     Read error. Various reading problems.\n"
 "\n"
-"     27   Out of memory. A memory allocation request failed.\n"
+"       27     Out  of memory. A memory allocation request failed.\n"
 "\n"
-"     28   Operation  timeout.  The  specified time-out period was\n"
-"          reached according to the conditions.\n"
+"       28     Operation timeout. The  specified  time-out  period\n"
+"              was reached according to the conditions.\n"
 "\n"
-"     29   FTP couldn't set ASCII. The server returned an  unknown\n"
-"          reply.\n"
+"       29     FTP  couldn't  set  ASCII.  The  server returned an\n"
+"              unknown reply.\n"
 "\n"
-"     30   FTP PORT failed. The PORT command failed.\n"
+"       30     FTP PORT failed. The PORT command failed.\n"
 "\n"
-"     31   FTP couldn't use REST. The REST command failed.\n"
+"       31     FTP couldn't use REST. The REST command failed.\n"
 "\n"
-"     32   FTP  couldn't  use  SIZE.  The SIZE command failed. The\n"
-"          command is an extension to the original  FTP  spec  RFC\n"
-"          959.\n"
+"       32     FTP couldn't use SIZE. The SIZE command failed. The\n"
+"              command  is  an  extension to the original FTP spec\n"
+"              RFC 959.\n"
 "\n"
-"     33   HTTP range error. The range \"command\" didn't work.\n"
+"       33     HTTP range error. The range \"command\" didn't  work.\n"
 "\n"
-"     34   HTTP   post  error.  Internal  post-request  generation\n"
-"          error.\n"
+"       34     HTTP  post  error. Internal post-request generation\n"
+"              error.\n"
+"       35     SSL connect error. The SSL handshaking failed.\n"
 "\n"
-"     35   SSL connect error. The SSL handshaking failed.\n"
+"       36     FTP bad download resume. Couldn't continue an  ear-\n"
+"              lier aborted download.\n"
 "\n"
-"     36   FTP bad download resume. Couldn't continue  an  earlier\n"
-"          aborted download.\n"
-"     37   FILE  couldn't read file. Failed to open the file. Per­\n"
-"          missions?\n"
+"       37     FILE  couldn't  read file. Failed to open the file.\n"
+"              Permissions?\n"
 "\n"
-"     38   LDAP cannot bind. LDAP bind operation failed.\n"
+"       38     LDAP cannot bind. LDAP bind operation failed.\n"
 "\n"
-"     39   LDAP search failed.\n"
+"       39     LDAP search failed.\n"
 "\n"
-"     40   Library not found. The LDAP library was not found.\n"
+"       40     Library not found. The LDAP library was not  found.\n"
 "\n"
-"     41   Function not found. A required LDAP  function  was  not\n"
-"          found.\n"
+"       41     Function  not  found.  A required LDAP function was\n"
+"              not found.\n"
 "\n"
-"     XX   There  will  appear  more  error  codes  here in future\n"
-"          releases. The existing ones are meant to never  change.\n"
+"       XX     There will appear more error codes here  in  future\n"
+"              releases.  The  existing  ones  are  meant to never\n"
+"              change.\n"
 "\n"
 "BUGS\n"
-"     If  you do find any (or have other suggestions), mail Daniel\n"
-"     Stenberg <Daniel.Stenberg@haxx.nu>.\n"
+"       If you do find  any  (or  have  other  suggestions),  mail\n"
+"       Daniel Stenberg <Daniel.Stenberg@haxx.nu>.\n"
 "\n"
 "AUTHORS / CONTRIBUTORS\n"
-"      - Daniel Stenberg <Daniel.Stenberg@haxx.nu>\n"
-"      - Rafael Sagula <sagula@inf.ufrgs.br>\n"
-"      - Sampo Kellomaki <sampo@iki.fi>\n"
-"      - Linas Vepstas <linas@linas.org>\n"
-"      - Bjorn Reese <breese@mail1.stofanet.dk>\n"
-"      - Johan Anderson <johan@homemail.com>\n"
-"      - Kjell Ericson <Kjell.Ericson@sth.frontec.se>\n"
-"      - Troy Engel <tengel@sonic.net>\n"
-"      - Ryan Nelson <ryan@inch.com>\n"
-"      - Bjorn Stenberg <Bjorn.Stenberg@sth.frontec.se>\n"
-"      - Angus Mackay <amackay@gus.ml.org>\n"
-"      - Eric Young <eay@cryptsoft.com>\n"
-"      - Simon Dick <simond@totally.irrelevant.org>\n"
-"      - Oren Tirosh <oren@monty.hishome.net>\n"
-"      - Steven G. Johnson <stevenj@alum.mit.edu>\n"
-"      - Gilbert Ramirez Jr. <gram@verdict.uthscsa.edu>\n"
-"      - Andrés García <ornalux@redestb.es>\n"
-"      - Douglas E. Wegscheid <wegscd@whirlpool.com>\n"
-"      - Mark Butler <butlerm@xmission.com>\n"
-"      - Eric Thelin <eric@generation-i.com>\n"
-"      - Marc Boucher <marc@mbsi.ca>\n"
-"      - Greg Onufer <Greg.Onufer@Eng.Sun.COM>\n"
-"      - Doug Kaufman <dkaufman@rahul.net>\n"
-"      - David Eriksson <david@2good.com>\n"
-"      - Ralph Beckmann <rabe@uni-paderborn.de>\n"
-"      - T. Yamada <tai@imasy.or.jp>\n"
-"      - Lars J. Aas <larsa@sim.no>\n"
-"      - Jörn Hartroth <Joern.Hartroth@telekom.de>\n"
-"      - Matthew Clarke <clamat@van.maves.ca>\n"
-"      - Linus Nielsen <Linus.Nielsen@haxx.nu>\n"
-"      - Felix von Leitner <felix@convergence.de>\n"
-"      - Dan Zitter <dzitter@zitter.net>\n"
-"      - Jongki Suwandi <Jongki.Suwandi@eng.sun.com>\n"
-"      - Chris Maltby <chris@aurema.com>\n"
-"      - Ron Zapp <rzapper@yahoo.com>\n"
-"      - Paul Marquis <pmarquis@iname.com>\n"
-"      - Ellis Pritchard <ellis@citria.com>\n"
-"      - Damien Adant <dams@usa.net>\n"
-"      - Chris <cbayliss@csc.come>\n"
+"        - Daniel Stenberg <Daniel.Stenberg@haxx.nu>\n"
+"        - Rafael Sagula <sagula@inf.ufrgs.br>\n"
+"        - Sampo Kellomaki <sampo@iki.fi>\n"
+"        - Linas Vepstas <linas@linas.org>\n"
+"        - Bjorn Reese <breese@mail1.stofanet.dk>\n"
+"        - Johan Anderson <johan@homemail.com>\n"
+"        - Kjell Ericson <Kjell.Ericson@haxx,nu>\n"
+"        - Troy Engel <tengel@sonic.net>\n"
+"        - Ryan Nelson <ryan@inch.com>\n"
+"        - Bjorn Stenberg <Bjorn.Stenberg@haxx.nu>\n"
+"        - Angus Mackay <amackay@gus.ml.org>\n"
+"        - Eric Young <eay@cryptsoft.com>\n"
+"        - Simon Dick <simond@totally.irrelevant.org>\n"
+"        - Oren Tirosh <oren@monty.hishome.net>\n"
+"        - Steven G. Johnson <stevenj@alum.mit.edu>\n"
+"        - Gilbert Ramirez Jr. <gram@verdict.uthscsa.edu>\n"
+"        - Andr's Garc'a <ornalux@redestb.es>\n"
+"        - Douglas E. Wegscheid <wegscd@whirlpool.com>\n"
+"        - Mark Butler <butlerm@xmission.com>\n"
+"        - Eric Thelin <eric@generation-i.com>\n"
+"        - Marc Boucher <marc@mbsi.ca>\n"
+"        - Greg Onufer <Greg.Onufer@Eng.Sun.COM>\n"
+"        - Doug Kaufman <dkaufman@rahul.net>\n"
+"        - David Eriksson <david@2good.com>\n"
+"        - Ralph Beckmann <rabe@uni-paderborn.de>\n"
+"        - T. Yamada <tai@imasy.or.jp>\n"
+"        - Lars J. Aas <larsa@sim.no>\n"
+"        - J\"rn Hartroth <Joern.Hartroth@telekom.de>\n"
+"        - Matthew Clarke <clamat@van.maves.ca>\n"
+"        - Linus Nielsen <Linus.Nielsen@haxx.nu>\n"
+"        - Felix von Leitner <felix@convergence.de>\n"
+"        - Dan Zitter <dzitter@zitter.net>\n"
+"        - Jongki Suwandi <Jongki.Suwandi@eng.sun.com>\n"
+"        - Chris Maltby <chris@aurema.com>\n"
+"        - Ron Zapp <rzapper@yahoo.com>\n"
+"        - Paul Marquis <pmarquis@iname.com>\n"
+"        - Ellis Pritchard <ellis@citria.com>\n"
+"        - Damien Adant <dams@usa.net>\n"
+"        - Chris <cbayliss@csc.come>\n"
+"        - Marco G. Salvagno <mgs@whiz.cjb.net>\n"
 "\n"
 "WWW\n"
-"     http://curl.haxx.nu\n"
+"       http://curl.haxx.nu\n"
 "\n"
 "FTP\n"
-"     ftp://ftp.sunet.se/pub/www/utilities/curl/\n"
+"       ftp://ftp.sunet.se/pub/www/utilities/curl/\n"
 "\n"
 "SEE ALSO\n"
-"     ftp(1), wget(1), snarf(1)\n"
+"       ftp(1), wget(1), snarf(1)\n"
 "\n"
 "LATEST VERSION\n"
 "\n"
@@ -821,33 +853,37 @@
 "\n"
 " FTP\n"
 "\n"
-"   Upload all data on stdin to a specified ftp site:\n"
+"  Upload all data on stdin to a specified ftp site:\n"
 "\n"
 "        curl -t ftp://ftp.upload.com/myfile\n"
 "\n"
-"   Upload data from a specified file, login with user and password:\n"
+"  Upload data from a specified file, login with user and password:\n"
 "\n"
 "        curl -T uploadfile -u user:passwd ftp://ftp.upload.com/myfile\n"
 "\n"
-"   Upload a local file to the remote site, and use the local file name remote\n"
-"   too:\n"
+"  Upload a local file to the remote site, and use the local file name remote\n"
+"  too:\n"
 " \n"
 "        curl -T uploadfile -u user:passwd ftp://ftp.upload.com/\n"
 "\n"
-"   NOTE: Curl is not currently supporing ftp upload through a proxy! The reason\n"
-"   for this is simply that proxies are seldomly configured to allow this and\n"
-"   that no author has supplied code that makes it possible!\n"
+"  Upload a local file to get appended to the remote file using ftp:\n"
+"\n"
+"        curl -T localfile -a ftp://ftp.upload.com/remotefile\n"
+"\n"
+"  NOTE: Curl does not support ftp upload through a proxy! The reason for this\n"
+"  is simply that proxies are seldomly configured to allow this and that no\n"
+"  author has supplied code that makes it possible!\n"
 "\n"
 " HTTP\n"
 "\n"
-"   Upload all data on stdin to a specified http site:\n"
+"  Upload all data on stdin to a specified http site:\n"
 "\n"
 "        curl -t http://www.upload.com/myfile\n"
 "\n"
-"   Note that the http server must've been configured to accept PUT before this\n"
-"   can be done successfully.\n"
+"  Note that the http server must've been configured to accept PUT before this\n"
+"  can be done successfully.\n"
 "\n"
-"   For other ways to do http data upload, see the POST section below.\n"
+"  For other ways to do http data upload, see the POST section below.\n"
 "\n"
 "VERBOSE / DEBUG\n"
 "\n"
@@ -1156,9 +1192,9 @@
 "\n"
 "HTTPS\n"
 "\n"
-"  Secure HTTP requires SSLeay to be installed and used when curl is built. If\n"
-"  that is done, curl is capable of retrieving and posting documents using the\n"
-"  HTTPS procotol.\n"
+"  Secure HTTP requires SSL libraries to be installed and used when curl is\n"
+"  built. If that is done, curl is capable of retrieving and posting documents\n"
+"  using the HTTPS procotol.\n"
 "\n"
 "  Example:\n"
 "\n"
@@ -1171,9 +1207,10 @@
 "  browsers (Netscape and MSEI both use the so called PKCS#12 format). If you\n"
 "  want curl to use the certificates you use with your (favourite) browser, you\n"
 "  may need to download/compile a converter that can convert your browser's\n"
-"  formatted certificates to PEM formatted ones. Dr Stephen N. Henson has\n"
-"  written a patch for SSLeay that adds this functionality. You can get his\n"
-"  patch (that requires an SSLeay installation) from his site at:\n"
+"  formatted certificates to PEM formatted ones. This kind of converter is\n"
+"  included in recent versions of OpenSSL, and for older versions Dr Stephen\n"
+"  N. Henson has written a patch for SSLeay that adds this functionality. You\n"
+"  can get his patch (that requires an SSLeay installation) from his site at:\n"
 "  http://www.drh-consultancy.demon.co.uk/\n"
 "\n"
 "  Example on how to automatically retrieve a document using a certificate with\n"
@@ -1300,6 +1337,34 @@
 "\n"
 "  The usage of the -x/--proxy flag overrides the environment variables.\n"
 "\n"
+"NETRC\n"
+"\n"
+"  Unix introduced the .netrc concept a long time ago. It is a way for a user\n"
+"  to specify name and password for commonly visited ftp sites in a file so\n"
+"  that you don't have to type them in each time you visit those sites. You\n"
+"  realize this is a big security risk if someone else gets hold of your\n"
+"  passwords, so therefor most unix programs won't read this file unless it is\n"
+"  only readable by yourself (curl doesn't care though).\n"
+"\n"
+"  Curl supports .netrc files if told so (using the -n/--netrc option). This is\n"
+"  not restricted to only ftp, but curl can use it for all protocols where\n"
+"  authentication is used.\n"
+"\n"
+"  A very simple .netrc file could look something like:\n"
+"\n"
+"        machine curl.haxx.nu login iamdaniel password mysecret\n"
+"\n"
+"CUSTOM OUTPUT\n"
+"\n"
+"  To better allow script programmers to get to know about the progress of\n"
+"  curl, the -w/--write-out option was introduced. Using this, you can specify\n"
+"  what information from the previous transfer you want to extract.\n"
+"\n"
+"  To display the amount of bytes downloaded together with some text and an\n"
+"  ending newline:\n"
+"\n"
+"        curl -w 'We downloaded %{size_download} bytes\\n' www.download.com\n"
+"\n"
 "MAILING LIST\n"
 "\n"
 "  We have an open mailing list to discuss curl, its development and things\n"
diff --git a/src/main.c b/src/main.c
index 3e1918b..2545cd0 100644
--- a/src/main.c
+++ b/src/main.c
@@ -1153,7 +1153,7 @@
                     URGTAG_CRLF, config.crlf,
                     URGTAG_QUOTE, config.quote,
                     URGTAG_POSTQUOTE, config.postquote,
-                    URGTAG_WRITEHEADER, &heads,
+                    URGTAG_WRITEHEADER, config.headerfile?&heads:NULL,
                     URGTAG_COOKIEFILE, config.cookiefile,
                     URGTAG_SSLVERSION, config.ssl_version,
                     URGTAG_TIMECONDITION, config.timecond,
@@ -1171,7 +1171,7 @@
     /* it wasn't directed to stdout or stderr so close the file! */
     fclose(config.errors);
 
-  if(!headerfilep && heads.stream)
+  if(config.headerfile && !headerfilep && heads.stream)
     fclose(heads.stream);
 
   if(urlbuffer)
diff --git a/src/version.h b/src/version.h
index fcbce52..7eb55d7 100644
--- a/src/version.h
+++ b/src/version.h
@@ -1,3 +1,3 @@
 #define CURL_NAME "curl"
-#define CURL_VERSION "6.5"
+#define CURL_VERSION "6.5.2"
 #define CURL_ID CURL_NAME " " CURL_VERSION " (" OS ") "