Fix DecimalFormat max. significant digit getter

The maximumSignificantDigits getter always returns 6 because
setSignificantDigitsUsed() unconditionally resets the significant-digit
limits to their defaults (max = 6), clobbering any explicitly set value.
Guard the reset so it only runs when significant digits were not already
in use.
diff --git a/README.chromium b/README.chromium
index 0889e4e..50ba7c5 100644
--- a/README.chromium
+++ b/README.chromium
@@ -276,3 +276,9 @@
   - upstream bug (fixed in 62.2-to-be)
     https://ssl.icu-project.org/trac/ticket/13851
 
+10. Max significant digit is always 6
+
+  - patches/nf_maxsig.patch
+  - upstream bug:
+    https://ssl.icu-project.org/trac/ticket/13852
+
diff --git a/patches/nf_maxsig.patch b/patches/nf_maxsig.patch
new file mode 100644
index 0000000..380b8e6
--- /dev/null
+++ b/patches/nf_maxsig.patch
@@ -0,0 +1,12 @@
+diff --git a/source/i18n/decimfmt.cpp b/source/i18n/decimfmt.cpp
+index a2638bb7..8ae773b7 100644
+--- a/source/i18n/decimfmt.cpp
++++ b/source/i18n/decimfmt.cpp
+@@ -1056,6 +1056,7 @@ UBool DecimalFormat::areSignificantDigitsUsed() const {
+ }
+ 
+ void DecimalFormat::setSignificantDigitsUsed(UBool useSignificantDigits) {
++    if (areSignificantDigitsUsed()) return;
+     // These are the default values from the old implementation.
+     int32_t minSig = useSignificantDigits ? 1 : -1;
+     int32_t maxSig = useSignificantDigits ? 6 : -1;
diff --git a/source/i18n/decimfmt.cpp b/source/i18n/decimfmt.cpp
index a2638bb..8ae773b 100644
--- a/source/i18n/decimfmt.cpp
+++ b/source/i18n/decimfmt.cpp
@@ -1056,6 +1056,7 @@
 }
 
 void DecimalFormat::setSignificantDigitsUsed(UBool useSignificantDigits) {
+    if (areSignificantDigitsUsed()) return;
     // These are the default values from the old implementation.
     int32_t minSig = useSignificantDigits ? 1 : -1;
     int32_t maxSig = useSignificantDigits ? 6 : -1;