diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 0eb81e43cd..8a583187e2 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -47,7 +47,7 @@ jobs:
run: julia --project -e '
using Pkg; Pkg.instantiate();
using NodeJS; run(`$(npm_cmd()) install highlight.js`);
- using Franklin; optimize();
+ using Franklin; optimize(prerender=false);
cp(joinpath("__site", "feed.xml"), joinpath("__site", "index.xml"))'
- name: Deploy (preview)
if: github.event_name == 'pull_request' && github.repository == github.event.pull_request.head.repo.full_name # if this build is a PR build and the PR is NOT from a fork
diff --git a/_assets/blog/2021-1.6-highlights/julia-1.0-stacktrace.png b/_assets/blog/2021-1.6-highlights/julia-1.0-stacktrace.png
new file mode 100644
index 0000000000..02212be97e
Binary files /dev/null and b/_assets/blog/2021-1.6-highlights/julia-1.0-stacktrace.png differ
diff --git a/_assets/blog/2021-1.6-highlights/julia-1.6-stacktrace.png b/_assets/blog/2021-1.6-highlights/julia-1.6-stacktrace.png
new file mode 100644
index 0000000000..5e869912e7
Binary files /dev/null and b/_assets/blog/2021-1.6-highlights/julia-1.6-stacktrace.png differ
diff --git a/_css/franklin.css b/_css/franklin.css
index 70c1825a2b..e54e7a4413 100644
--- a/_css/franklin.css
+++ b/_css/franklin.css
@@ -71,3 +71,15 @@
background-color: #d8ffd8;
border-color: #404040;
}
+
+/* Specific colours for code highlighting */
+
+.hljs-metaj, .hljs-metas, .hljs-metap { font-weight: bold; }
+
+.hljs-meta { color: #b14e8f; } /* @... */
+.hljs-metaj { color: rgb(25, 179, 51); } /* julia> */
+.hljs-metas { color: red; } /* shell> */
+.hljs-metap { color: rgb(51, 131, 231); } /* pkg> */
+
+.hljs-string {color: #ca4418;}
+.hljs-subst { color: inherit;}
diff --git a/_layout/head.html b/_layout/head.html
index 03b31f5683..b13b1767ad 100644
--- a/_layout/head.html
+++ b/_layout/head.html
@@ -3,13 +3,15 @@
{{insert meta.html}}
- {{insert head_scripts.html}}
{{if hasmath}} {{insert head_katex.html }} {{end}}
{{if hascode}} {{insert head_highlight.html }} {{end}}
+
+ {{insert head_scripts.html}}
+
{{isdef title}} {{fill title}} {{end}}
{{ispage /blog/* /jsoc/gsoc/*}}
diff --git a/_libs/highlight/highlight.pack.js b/_libs/highlight/highlight.pack.js
index 4f6138fd3a..8f0f27ec0d 100644
--- a/_libs/highlight/highlight.pack.js
+++ b/_libs/highlight/highlight.pack.js
@@ -1,2 +1,2 @@
/*! highlight.js v9.17.1 | BSD3 License | git.io/hljslicense */
-!function(e){var n="object"==typeof window&&window||"object"==typeof self&&self;"undefined"==typeof exports||exports.nodeType?n&&(n.hljs=e({}),"function"==typeof define&&define.amd&&define([],function(){return n.hljs})):e(exports)}(function(a){var f=[],o=Object.keys,N={},g={},_=!0,n=/^(no-?highlight|plain|text)$/i,E=/\blang(?:uage)?-([\w-]+)\b/i,t=/((^(<[^>]+>|\t|)+|(?:\n)))/gm,r={case_insensitive:"cI",lexemes:"l",contains:"c",keywords:"k",subLanguage:"sL",className:"cN",begin:"b",beginKeywords:"bK",end:"e",endsWithParent:"eW",illegal:"i",excludeBegin:"eB",excludeEnd:"eE",returnBegin:"rB",returnEnd:"rE",variants:"v",IDENT_RE:"IR",UNDERSCORE_IDENT_RE:"UIR",NUMBER_RE:"NR",C_NUMBER_RE:"CNR",BINARY_NUMBER_RE:"BNR",RE_STARTERS_RE:"RSR",BACKSLASH_ESCAPE:"BE",APOS_STRING_MODE:"ASM",QUOTE_STRING_MODE:"QSM",PHRASAL_WORDS_MODE:"PWM",C_LINE_COMMENT_MODE:"CLCM",C_BLOCK_COMMENT_MODE:"CBCM",HASH_COMMENT_MODE:"HCM",NUMBER_MODE:"NM",C_NUMBER_MODE:"CNM",BINARY_NUMBER_MODE:"BNM",CSS_NUMBER_MODE:"CSSNM",REGEXP_MODE:"RM",TITLE_MODE:"TM",UNDERSCORE_TITLE_MODE:"UTM",COMMENT:"C",beginRe:"bR",endRe:"eR",illegalRe:"iR",lexemesRe:"lR",terminators:"t",terminator_end:"tE"},C="",m="Could not find the language '{}', did you forget to load/include a language module?",O={classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:void 0},c="of and for in not or if then".split(" ");function B(e){return e.replace(/&/g,"&").replace(//g,">")}function d(e){return e.nodeName.toLowerCase()}function R(e){return n.test(e)}function i(e){var n,t={},r=Array.prototype.slice.call(arguments,1);for(n in e)t[n]=e[n];return r.forEach(function(e){for(n in e)t[n]=e[n]}),t}function p(e){var a=[];return function e(n,t){for(var r=n.firstChild;r;r=r.nextSibling)3===r.nodeType?t+=r.nodeValue.length:1===r.nodeType&&(a.push({event:"start",offset:t,node:r}),t=e(r,t),d(r).match(/br|hr|img|input/)||a.push({event:"stop",offset:t,node:r}));return t}(e,0),a}function v(e,n,t){var r=0,a="",i=[];function o(){return 
e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset"}function l(e){a+=""+d(e)+">"}function u(e){("start"===e.event?c:l)(e.node)}for(;e.length||n.length;){var s=o();if(a+=B(t.substring(r,s[0].offset)),r=s[0].offset,s===e){for(i.reverse().forEach(l);u(s.splice(0,1)[0]),(s=o())===e&&s.length&&s[0].offset===r;);i.reverse().forEach(c)}else"start"===s[0].event?i.push(s[0].node):i.pop(),u(s.splice(0,1)[0])}return a+B(t.substr(r))}function l(n){return n.v&&!n.cached_variants&&(n.cached_variants=n.v.map(function(e){return i(n,{v:null},e)})),n.cached_variants?n.cached_variants:function e(n){return!!n&&(n.eW||e(n.starts))}(n)?[i(n,{starts:n.starts?i(n.starts):null})]:Object.isFrozen(n)?[i(n)]:[n]}function u(e){if(r&&!e.langApiRestored){for(var n in e.langApiRestored=!0,r)e[n]&&(e[r[n]]=e[n]);(e.c||[]).concat(e.v||[]).forEach(u)}}function M(n,t){var i={};return"string"==typeof n?r("keyword",n):o(n).forEach(function(e){r(e,n[e])}),i;function r(a,e){t&&(e=e.toLowerCase()),e.split(" ").forEach(function(e){var n,t,r=e.split("|");i[r[0]]=[a,(n=r[0],(t=r[1])?Number(t):function(e){return-1!=c.indexOf(e.toLowerCase())}(n)?0:1)]})}}function x(r){function s(e){return e&&e.source||e}function f(e,n){return new RegExp(s(e),"m"+(r.cI?"i":"")+(n?"g":""))}function a(a){var i,e,o={},c=[],l={},t=1;function n(e,n){o[t]=e,c.push([e,n]),t+=new RegExp(n.toString()+"|").exec("").length-1+1}for(var r=0;r')+n+(t?"":C)}function o(){R+=null!=E.sL?function(){var e="string"==typeof E.sL;if(e&&!N[E.sL])return B(p);var n=e?S(E.sL,p,!0,d[E.sL]):T(p,E.sL.length?E.sL:void 0);return 0")+'"');if("end"===n.type){var r=s(n);if(null!=r)return r}return p+=t,t.length}var g=D(n);if(!g)throw console.error(m.replace("{}",n)),new Error('Unknown language: "'+n+'"');x(g);var r,E=e||g,d={},R="";for(r=E;r!==g;r=r.parent)r.cN&&(R=c(r.cN,"",!0)+R);var p="",v=0;try{for(var 
M,b,h=0;E.t.lastIndex=h,M=E.t.exec(i);)b=t(i.substring(h,M.index),M),h=M.index+b;for(t(i.substr(h)),r=E;r.parent;r=r.parent)r.cN&&(R+=C);return{relevance:v,value:R,i:!1,language:n,top:E}}catch(e){if(e.message&&-1!==e.message.indexOf("Illegal"))return{i:!0,relevance:0,value:B(i)};if(_)return{relevance:0,value:B(i),language:n,top:E,errorRaised:e};throw e}}function T(t,e){e=e||O.languages||o(N);var r={relevance:0,value:B(t)},a=r;return e.filter(D).filter(L).forEach(function(e){var n=S(e,t,!1);n.language=e,n.relevance>a.relevance&&(a=n),n.relevance>r.relevance&&(a=r,r=n)}),a.language&&(r.second_best=a),r}function b(e){return O.tabReplace||O.useBR?e.replace(t,function(e,n){return O.useBR&&"\n"===e?" ":O.tabReplace?n.replace(/\t/g,O.tabReplace):""}):e}function s(e){var n,t,r,a,i,o,c,l,u,s,f=function(e){var n,t,r,a,i=e.className+" ";if(i+=e.parentNode?e.parentNode.className:"",t=E.exec(i)){var o=D(t[1]);return o||(console.warn(m.replace("{}",t[1])),console.warn("Falling back to no-highlight mode for this block.",e)),o?t[1]:"no-highlight"}for(n=0,r=(i=i.split(/\s+/)).length;n/g,"\n"):n=e,i=n.textContent,r=f?S(f,i,!0):T(i),(t=p(n)).length&&((a=document.createElement("div")).innerHTML=r.value,r.value=v(t,p(a),i)),r.value=b(r.value),e.innerHTML=r.value,e.className=(o=e.className,c=f,l=r.language,u=c?g[c]:l,s=[o.trim()],o.match(/\bhljs\b/)||s.push("hljs"),-1===o.indexOf(u)&&s.push(u),s.join(" ").trim()),e.result={language:r.language,re:r.relevance},r.second_best&&(e.second_best={language:r.second_best.language,re:r.second_best.relevance}))}function h(){if(!h.called){h.called=!0;var e=document.querySelectorAll("pre code");f.forEach.call(e,s)}}var w={disableAutodetect:!0};function D(e){return e=(e||"").toLowerCase(),N[e]||N[g[e]]}function L(e){var n=D(e);return n&&!n.disableAutodetect}return 
a.highlight=S,a.highlightAuto=T,a.fixMarkup=b,a.highlightBlock=s,a.configure=function(e){O=i(O,e)},a.initHighlighting=h,a.initHighlightingOnLoad=function(){window.addEventListener("DOMContentLoaded",h,!1),window.addEventListener("load",h,!1)},a.registerLanguage=function(n,e){var t;try{t=e(a)}catch(e){if(console.error("Language definition for '{}' could not be registered.".replace("{}",n)),!_)throw e;console.error(e),t=w}u(N[n]=t),t.rawDefinition=e.bind(null,a),t.aliases&&t.aliases.forEach(function(e){g[e]=n})},a.listLanguages=function(){return o(N)},a.getLanguage=D,a.requireLanguage=function(e){var n=D(e);if(n)return n;throw new Error("The '{}' language is required, but not loaded.".replace("{}",e))},a.autoDetection=L,a.inherit=i,a.debugMode=function(){_=!1},a.IR=a.IDENT_RE="[a-zA-Z]\\w*",a.UIR=a.UNDERSCORE_IDENT_RE="[a-zA-Z_]\\w*",a.NR=a.NUMBER_RE="\\b\\d+(\\.\\d+)?",a.CNR=a.C_NUMBER_RE="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",a.BNR=a.BINARY_NUMBER_RE="\\b(0b[01]+)",a.RSR=a.RE_STARTERS_RE="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",a.BE=a.BACKSLASH_ESCAPE={b:"\\\\[\\s\\S]",relevance:0},a.ASM=a.APOS_STRING_MODE={cN:"string",b:"'",e:"'",i:"\\n",c:[a.BE]},a.QSM=a.QUOTE_STRING_MODE={cN:"string",b:'"',e:'"',i:"\\n",c:[a.BE]},a.PWM=a.PHRASAL_WORDS_MODE={b:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},a.C=a.COMMENT=function(e,n,t){var r=a.inherit({cN:"comment",b:e,e:n,c:[]},t||{});return 
r.c.push(a.PWM),r.c.push({cN:"doctag",b:"(?:TODO|FIXME|NOTE|BUG|XXX):",relevance:0}),r},a.CLCM=a.C_LINE_COMMENT_MODE=a.C("//","$"),a.CBCM=a.C_BLOCK_COMMENT_MODE=a.C("/\\*","\\*/"),a.HCM=a.HASH_COMMENT_MODE=a.C("#","$"),a.NM=a.NUMBER_MODE={cN:"number",b:a.NR,relevance:0},a.CNM=a.C_NUMBER_MODE={cN:"number",b:a.CNR,relevance:0},a.BNM=a.BINARY_NUMBER_MODE={cN:"number",b:a.BNR,relevance:0},a.CSSNM=a.CSS_NUMBER_MODE={cN:"number",b:a.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},a.RM=a.REGEXP_MODE={cN:"regexp",b:/\//,e:/\/[gimuy]*/,i:/\n/,c:[a.BE,{b:/\[/,e:/\]/,relevance:0,c:[a.BE]}]},a.TM=a.TITLE_MODE={cN:"title",b:a.IR,relevance:0},a.UTM=a.UNDERSCORE_TITLE_MODE={cN:"title",b:a.UIR,relevance:0},a.METHOD_GUARD={b:"\\.\\s*"+a.UIR,relevance:0},[a.BE,a.ASM,a.QSM,a.PWM,a.C,a.CLCM,a.CBCM,a.HCM,a.NM,a.CNM,a.BNM,a.CSSNM,a.RM,a.TM,a.UTM,a.METHOD_GUARD].forEach(function(e){!function n(t){Object.freeze(t);var r="function"==typeof t;Object.getOwnPropertyNames(t).forEach(function(e){!t.hasOwnProperty(e)||null===t[e]||"object"!=typeof t[e]&&"function"!=typeof t[e]||r&&("caller"===e||"callee"===e||"arguments"===e)||Object.isFrozen(t[e])||n(t[e])});return t}(e)}),a});hljs.registerLanguage("bash",function(e){var t={cN:"variable",v:[{b:/\$[\w\d#@][\w\d_]*/},{b:/\$\{(.*?)}/}]},a={cN:"string",b:/"/,e:/"/,c:[e.BE,t,{cN:"variable",b:/\$\(/,e:/\)/,c:[e.BE]}]};return{aliases:["sh","zsh"],l:/\b-?[a-z\._]+\b/,k:{keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown 
echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},c:[{cN:"meta",b:/^#![^\n]+sh\s*$/,relevance:10},{cN:"function",b:/\w[\w\d_]*\s*\(\s*\)\s*\{/,rB:!0,c:[e.inherit(e.TM,{b:/\w[\w\d_]*/})],relevance:0},e.HCM,a,{cN:"",b:/\\"/},{cN:"string",b:/'/,e:/'/},t]}});hljs.registerLanguage("ini",function(e){var b={cN:"string",c:[e.BE],v:[{b:"'''",e:"'''",relevance:10},{b:'"""',e:'"""',relevance:10},{b:'"',e:'"'},{b:"'",e:"'"}]};return{aliases:["toml"],cI:!0,i:/\S/,c:[e.C(";","$"),e.HCM,{cN:"section",b:/^\s*\[+/,e:/\]+/},{b:/^[a-z0-9\[\]_\.-]+\s*=\s*/,e:"$",rB:!0,c:[{cN:"attr",b:/[a-z0-9\[\]_\.-]+/},{b:/=/,eW:!0,relevance:0,c:[e.C(";","$"),e.HCM,{cN:"literal",b:/\bon|off|true|false|yes|no\b/},{cN:"variable",v:[{b:/\$[\w\d"][\w\d_]*/},{b:/\$\{(.*?)}/}]},b,{cN:"number",b:/([\+\-]+)?[\d]+_[\d_]+/},e.NM]}]}]}});hljs.registerLanguage("xml",function(e){var 
c={cN:"symbol",b:"&[a-z]+;|[0-9]+;|[a-f0-9]+;"},s={b:"\\s",c:[{cN:"meta-keyword",b:"#?[a-z_][a-z1-9_-]+",i:"\\n"}]},a=e.inherit(s,{b:"\\(",e:"\\)"}),t=e.inherit(e.ASM,{cN:"meta-string"}),l=e.inherit(e.QSM,{cN:"meta-string"}),r={eW:!0,i:/,relevance:0,c:[{cN:"attr",b:"[A-Za-z0-9\\._:-]+",relevance:0},{b:/=\s*/,relevance:0,c:[{cN:"string",endsParent:!0,v:[{b:/"/,e:/"/,c:[c]},{b:/'/,e:/'/,c:[c]},{b:/[^\s"'=<>`]+/}]}]}]};return{aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"],cI:!0,c:[{cN:"meta",b:"",relevance:10,c:[s,l,t,a,{b:"\\[",e:"\\]",c:[{cN:"meta",b:"",c:[s,a,l,t]}]}]},e.C("\x3c!--","--\x3e",{relevance:10}),{b:"<\\!\\[CDATA\\[",e:"\\]\\]>",relevance:10},c,{cN:"meta",b:/<\?xml/,e:/\?>/,relevance:10},{b:/<\?(php)?/,e:/\?>/,sL:"php",c:[{b:"/\\*",e:"\\*/",skip:!0},{b:'b"',e:'"',skip:!0},{b:"b'",e:"'",skip:!0},e.inherit(e.ASM,{i:null,cN:null,c:null,skip:!0}),e.inherit(e.QSM,{i:null,cN:null,c:null,skip:!0})]},{cN:"tag",b:"",rE:!0,sL:["css","xml"]}},{cN:"tag",b:"
+~~~
+
+For packages that are being developed, given that their code will be changed by
+other mechanisms than Pkg, this new workflow won’t automatically avoid
+encountering the standard code-load time precompilation. However non-dev-ed
+dependencies of those packages will be kept ready to load, so top-level
+precompilation at load time should remain lower for dev-ed packages.
+
+## Compile time percentage
+
+_Ian Butterworth_
+
+A small change that should help understanding of one of Julia’s quirks for
+newcomers is that the timing macro `@time` and its verbose friend `@timev` now
+report if any of the reported time has been spent on compilation.[^1]
+
+```julia-repl
+julia> x = rand(10,10);
+
+julia> @time x * x;
+ 0.540600 seconds (2.35 M allocations: 126.526 MiB, 4.43% gc time, 99.94% compilation time)
+
+julia> @time x * x;
+ 0.000010 seconds (1 allocation: 896 bytes)
+```
+Given Julia’s Just In Time (JIT) / Just Ahead Of Time (JAOT) compilation, the
+first time code is run the compilation overhead is often substantial, with big
+speed improvements seen in subsequent calls. This change highlights that
+behavior, serving as both a reminder and a tool for rooting out unwanted
+compilation effort, i.e. over-specialized code.
+
+[^1]: Note that in some cases the system will look inside the `@time` expression and compile some of the called code before execution of the top-level expression begins. When that happens, some compilation time will not be counted. To include this time you can run `@time @eval ...`
+
+## Eliminating needless recompilation
+
+_Tim Holy_
+
+One of Julia’s most powerful features is its extensibility: you can add new
+methods to previously-defined functions, and use previously-defined methods on
+new types. Sometimes, these new entities force Julia to recompile code to
+account for changes in dispatch. This happens in two steps: first, “outdated”
+code gets _invalidated_, marking it as unsuitable for use; second, as needed the
+code is again compiled from scratch taking account of the new methods and types.
+
+Earlier versions of Julia were somewhat conservative, and invalidated old code
+in some circumstances where there was no actual change in dispatch. Moreover,
+there were many places where Julia and its standard libraries were written in a
+way that defeated Julia’s type-inference. Because the compiler sometimes had to
+invalidate code just because a new method _might_ apply, any uncertainty about
+types magnifies the risk and frequency of invalidation. In older versions of
+Julia, the combination of these effects made invalidation widespread: just
+loading certain packages led to invalidation of up to 10% of Julia’s precompiled
+code. The delay for recompilation could sometimes make interactive sessions feel
+sluggish. When invalidation occurred in Julia’s package-loading code, it also
+delayed loading of the next package, contributing to long waits for `using
+SomePkg` when `SomePkg` depends on other packages.
+
+In 1.6, the scheme for invalidating old code has been made more accurate and
+selective. Moreover, Julia and its standard libraries received a thorough
+makeover to help type inference arrive at a concrete answer more often. The
+result is a leaner, faster Julia that is far more impervious to method
+invalidation, and feels considerably more responsive and nimble in interactive
+sessions. Related blog post: [Analyzing sources of compiler latency in Julia:
+method invalidations](https://julialang.org/blog/2020/08/invalidations/).
+
+## Compiler latency reduction
+
+_Jameson Nash and Jeff Bezanson_
+
+In addition to making our library code more compiler-friendly, we continue
+to try to speed up the compiler itself. This remains one of our main
+technical challenges. In this release there aren't any major
+breakthroughs, but we do have some modest improvements due to work on the
+method table data structure.
+
+Method specificity is a partial order, and prior to 1.6 we stored methods in
+sorted order. We also attempted to identify ambiguous methods on insertion,
+hoping to avoid repeating the work for each future query.
+Unfortunately, sorting a partial order requires quadratic time, and this time
+began to show up prominently during package loading (when a package's methods
+need to be inserted into the currently-active method tables).
+
+We improved things by making the process lazier, moving sorting and ambiguity
+detection into the algorithm for finding matching methods.
+This algorithm runs very often, so it was not at first intuitive that this
+change would help.
+But the key is that the vast majority of queries are for specific enough
+types that most possible matches can be eliminated easily, leaving
+many fewer inputs to the most expensive steps.
+
+The main visible improvement here is to package loading, adding a bit of
+extra speed on top of the gains from addressing invalidations.
+
+There has been substantial effort put into refining our inference quality
+characteristics, both in stopping analysis quickly when it is perceived to not
+be of benefit, and extracting more precise information when possible. Both
+sides of this can have significant benefit for complex code, such as plotting
+libraries, which branch over large numbers of different configuration options.
+
+Much of this benefit should simply be available without doing anything to your
+code except updating Julia version! To go even further, there is now also a
+general framework for profiling compilation times, for investigating what
+functions contribute most heavily to execution latency. This is described in
+more detail by others. But with each release, you may just find that an old
+code pattern, which you used to need to avoid for performance, now works great!
+
+We applied many micro-optimizations to several internal data-structures also.
+These again won't affect how your code works, but should improve how it
+performs dynamically. For example, `invokelatest` is now faster than `try`, and
+nearly as fast as dynamic dispatch. Several complex internal data-structures
+that were trees also became simple hash-tables, improving both their scaling
+performance and making them more cheaply thread-safe. This affects some key
+areas such as type allocations (`apply_type` and `tuple`), method optimization
+lookup (`MethodInstance`), and dispatch (`jl_apply_generic`).
+
+While we haven't yet reached our target performance levels (but, really, can we
+ever get that fast?), we hope this release is a great step forward. There's
+already other work completed in preparation for getting latency down even more
+in the future!
+
+## Tooling to help optimize packages for latency
+
+_Nathan Daly & Tim Holy_
+
+Julia 1.6, in conjunction with SnoopCompile v2.2.0 or higher, features new tools
+for compiler introspection, especially (but not exclusively) for type inference.
+Developers can use the new tools to profile type inference and determine how
+particular package implementation choices interact with compilation time. Early
+adopters have used these tools to eliminate anywhere from a few percent to the
+large majority of first-use latency.
+
+- Related blog post: [Tutorial on precompilation](https://julialang.org/blog/2021/01/precompile_tutorial/)
+- [SnoopCompile.jl documentation](https://timholy.github.io/SnoopCompile.jl/stable/)
+
+## Binary loading speedups
+
+_Elliot Saba & Mosè Giordano_
+
+Providing reliable, portable binaries to packages is a challenge that all
+packaging environments must face, and while Julia's strategy has always been to
+prioritize reliability and reproducibility over all other concerns, in the past
+it has come at a cost. Our solution to the problems of reliability and
+reproducibility was to more fully isolate installed binaries and cross-compile
+them ourselves using the [`BinaryBuilder.jl`](https://github.com/JuliaPackaging/BinaryBuilder.jl)
+framework. Libraries built from `BinaryBuilder.jl` are most often used through
+so-called JLL packages which provide a standardized API that Julia packages can
+use to access the provided binaries. This ease of use and reliability of
+installation resulted in _vastly_ increased load times as compared to the bad
+old days when Julia packages would blindly `dlopen()` libraries and load
+whatever libraries happened to be sitting on the library search path. To
+illustrate the issue, in Julia 1.4, loading the GTK+3 stack required **7
+seconds** when it used to take around **500ms** on the same machine. Through
+many months of hard work and careful investigation, we are pleased to report
+that the same stack of libraries now takes less than **200ms** to load when
+using Julia v1.6 on the same machine.
+
+The cause of this slowdown was multi-faceted and spread across many different
+layers of the Julia ecosystem. Part of the issue was general compiler latency,
+which has been a focus of the compiler team for some time now, as evidenced by
+the compiler latency reduction section in this blog post. Another major piece
+though was general overhead
+incurred by having so many small JLL packages providing bindings; there was
+significant overhead in the loading of each package. In particular, there was
+code inference, code generation and data-structure loading that needed to be
+eliminated if the JLL packages were to be lightweight enough to not affect
+overall load times. In our experiments, we found that one of the largest
+sources of package load times was in the deserialization of backedge
+information, the links from functions in `Base` back to our packages that would
+cause our functions to be recompiled if there was an invalidation affecting
+that `Base` function. As counter-intuitive as it may seem, simply using
+a large number of functions from `Base` can very quickly balloon the
+precompilation cache files for your package, causing an increase in loading
+time! While the increase itself is small, (`3-10ms` at the worst) when you are
+loading many dozens of JLL packages, this adds up quickly.
+
+Our work to slim JLL packages down resulted in the creation of a new package,
+[`JLLWrappers.jl`](https://github.com/JuliaPackaging/JLLWrappers.jl). This
+package provides macros that auto-generate the bindings necessary for a JLL
+package, and do so by using the minimum number of functions and data structures
+possible. By limiting the number of backedges and data structures, as well as
+centralizing the template pieces of code that each JLL package uses, we are
+able to not only vastly improve load times, but improve compile times as well!
+As an added bonus, improvements to JLL package APIs can now be made directly in
+`JLLWrappers.jl` without needing to re-deploy hundreds of JLLs. Because these
+JLL packages only define a thin wrapper around simple, lightweight functions
+that load libraries and return paths and such, they do not benefit from the
+heavy optimization that most Julia code undergoes. One final piece of the
+optimization puzzle was therefore to disable optimizations and use the new
+[per-module optimization levels](https://julialang.org/blog/2020/08/julia-1.5-highlights/#per-module_optimization_levels)
+functionality to reduce the amount of time spent generating a very small
+amount of code, saving precious seconds.
+
+~~~
+
+~~~
+
+The interplay between compiler improvements and the benefits that `JLLWrappers`
+affords were [well-recorded](https://github.com/JuliaGraphics/Gtk.jl/issues/466#issuecomment-716058685)
+during the development process, and showcase a speedup of load times for the
+original, non-JLLWrapperized `GTK3_jll` package from its peak at `6.73` seconds
+on Julia v1.4 down to `2.34` seconds on Julia v1.6, purely from compiler
+improvements. If you haven't thanked your local compiler team today, you
+probably should. Using the slimmed-down JLLWrappers implementation of all
+relevant JLLWrappers packages results in a further lowering of load time down
+to a blistering `140ms`. End-to-end, this means that this work effected a
+roughly **`50x` speedup** in load times for large trees of binary artifacts.
+While there are some minor improvements for lazy loading of shared libraries
+and such in the pipeline, we are confident that this work will provide a strong
+foundation for Julia's binary packaging story for the foreseeable future.
+
+## Downloads & NetworkingOptions
+
+_Stefan Karpinski_
+
+In previous releases, when you download something in Julia, either directly,
+using the `Base.download` function, or indirectly when using `Pkg`, the actual
+downloading was done by some external process—whichever one of `curl`, `wget`,
+`fetch` or `PowerShell` happened to be available on your system. The fact that
+this frankendownload feature worked at all was something of a miracle, that only
+worked due to much fussy command-line-option finessing over the years. And
+while this did mostly work, there were some major drawbacks to this approach.
+
+1. **It’s slow.** Starting a new process for each download is expensive; but
+ worse, those processes can’t share TCP connections or reuse already
+ negotiated TLS connections, so every download needs to do the TCP
+ SYN/ACK song and dance and then also do the TLS secret handshake, all of
+ which takes a lot of time.
+
+2. **It’s inconsistent.** Since the exact way things got downloaded depended on
+ what happens to be installed on your system, download behavior was terribly
+ inconsistent. Downloads that work on one system might not work on another
+ one. Moreover, any issues someone might have, inevitably end up out of scope
+ for Julia to fix — the typical answer is "fix your system
+ `curl`/`wget`/whatever," which is not a very satisfactory solution for someone
+ using Julia who just wants to be able to download things.
+
+3. **It’s inflexible.** The core requirements of downloading something are
+ simple: URL in, file out. But maybe you need to pass some custom headers with
+ the request. Or maybe you need to see what headers were returned. Often you
+ want to display progress for large downloads. Some download commands have
+ options for some of these, but we can only support options that are supported
+ by all download methods, which has forced downloads to be pretty inflexible.
+
+In Julia 1.6 all downloading is done with `libcurl-7.73.0` via the new
+`Downloads.jl` standard library. Downloading is done in-process and TCP+TLS
+connections are shared and reused. If the server supports HTTP/2, multiple
+requests to that server can even be multiplexed onto the same HTTPS connections.
+All of this means that downloads are much faster.
+
+Since all Julia users now use the same method to download things, if it works on
+one system, it is much more likely to work everywhere. No more broken downloads
+just because the system curl happens to be really old. And `libcurl` is highly
+configurable: we can pass custom headers with requests, see what headers were
+included with the response, and get download progress — all in the same way
+everywhere.
+
+As part of reworking downloads, we have switched to using the built-in TLS stack
+on macOS and Windows, which allows downloads to use the built-in mechanism for
+verifying the identity of TLS servers via the system’s collection of certificate
+authority root certificates (“CA roots”, for short). On Linux and FreeBSD, we
+now also look in the standard locations for a PEM file with CA root
+certificates. The advantage of using the system CA root certificates is that
+most systems will automatically keep these CA roots up-to-date and on Windows
+and macOS the OS will check for revoked certificates when performing certificate
+verification (Linux doesn't have a standard way to do this). Julia itself still
+ships with a reasonably up-to-date bundle of CA roots, but we no longer use it
+by default unless system CA roots cannot be found.
+
+Using the system CA roots already means that it’s much more likely that Julia
+will “just work” from behind firewalls. Many institutional firewalls will
+man-in-the-middle (MITM) your outgoing HTTPS connections and present a forged
+HTTPS certificate for the server to your client connection. In order for this
+not to set off security alarms on your client, they will typically add a private
+CA root certificate to the user's system so that your browser will accept the
+firewall’s forged certificate. Since Julia now uses the system’s CA roots, it
+respects any private CA roots that have been added there.
+
+If this doesn’t work for some reason, Julia 1.6 also introduces the
+`NetworkOptions.jl` stdlib: this package acts as a central place for network
+configuration options that can be controlled by various environment variables
+and which are used to modify the behavior of networking libraries like `libcurl`
+and `libgit2` in a consistent way. For example, if you want to turn off HTTPS
+host verification entirely, you can do `export JULIA_SSL_NO_VERIFY_HOSTS="**"`
+in your shell and both the Downloads and LibGit2 packages will not perform host
+verification when downloading over HTTPS. There are various other options
+available in NetworkOptions, including:
+
+- `JULIA_SSL_CA_ROOTS_PATH` to provide a custom PEM file of CA roots
+- `SSH_KNOWN_HOSTS_FILES` to use non-standard locations for SSH known hosts
+- `JULIA_*_VERIFY_HOSTS` variables for fine-grained control over which hosts
+ should or shouldn’t be verified over various transports, including TLS and SSH
+
+These options are now consistently respected across all network-facing code that
+ships with Julia itself and we will be working with package developers to
+encourage them to use `NetworkOptions` for configuration of libraries such as
+mbedtls and others. This will allow consistent configuration of networking
+options across the entire Julia ecosystem.
+
+## CI Robustness
+
+_Jeff Bezanson, Keno Fischer, and Jameson Nash_
+
+This release cycle we spent quite a bit of time paying down technical debt in
+the form of intermittent test failures in our continuous integration (CI)
+process. Like all responsible software projects these days, we run our full
+build and test suite for every commit and for every proposed change. If the
+tests fail, you stop the presses until the problem is fixed — either by
+reverting a change, committing a new fix, or revising a proposed patch until it
+passes. Given this simple policy, it’s difficult to see how a project could be
+ambushed by persistent test failures. And yet that’s exactly what happened to
+us: over time, we ended up in a state where a high percentage of test runs
+failed, usually with just a single obscure test case failing.
+
+Several factors contributed to this predicament. First, the base Julia test
+suite is quite large and covers a wide range of functionality, from parsing and
+compiling to linear algebra, package management, sockets, threads, handling file system
+events, and more. With that much surface area, we were likely to end up with a
+handful of rare bugs, or failures due to overly-fragile tests. We run easily
+over a hundred builds per day, so even failures with a rate of 0.1% would appear often
+enough to be disruptive. Timing-sensitive tests are a classic example, e.g.
+testing that a one-second timeout indeed happens after approximately one second.
+On hosted VMs in particular, timing can be far more variable than what you would
+ever see on dedicated hardware. A one-second timeout can, unfortunately, take
+more than 60 seconds on a heavily loaded VM.
+
+After much debugging, including infrastructure work to run tests under
+[rr](https://julialang.org/blog/2020/05/rr) by default, we were able to
+identify and fix many issues. Here is a representative sample:
+
+- [Close a race condition in the FileWatching tests](https://github.com/JuliaLang/julia/pull/38407)
+- [Disarm watchdog timer after Sockets test finishes](https://github.com/JuliaLang/julia/pull/38586)
+- [Remove some overhead to reduce timing variation in Channels test](https://github.com/JuliaLang/julia/pull/38662)
+- [Fix a test for `mktemp` to prevent occasional duplicate names](https://github.com/JuliaLang/julia/pull/38779)
+- [Fix a calling convention issue causing occasional failures on FreeBSD](https://github.com/JuliaLang/julia/pull/38882)
+- [Fix a libunwind issue causing Profile test failures](https://github.com/JuliaLang/julia/pull/39553)
+- [Fix a race in AsyncCondition test](https://github.com/JuliaLang/julia/pull/39583)
+- [Fix occasional deadlock in REPL test](https://github.com/JuliaLang/julia/pull/39482)
+- [Port reuse issue causing occasional Distributed test failure on Darwin](https://github.com/JuliaLang/julia/pull/38901)
+- [Lock leak causing occasional test failure](https://github.com/JuliaLang/julia/pull/38246)
+
+As a result, the proportion of "green check" PRs is noticeably higher.
+We are not yet at 100%, but CI can now generally be expected to pass.
+
+
+## Improved stacktrace formatting
+
+_Kristoffer Carlsson_
+
+For Julia release 0.6, formatting of stacktraces [went through an overhaul](https://julialang.org/blog/2017/04/repl-0.6-highlights/#printing_of_stack_traces)
+and in this release [`@jkrumbiegel`](https://github.com/jkrumbiegel) made further improvements in this area (implemented in [#36134](https://github.com/JuliaLang/julia/pull/36134)).
+Let's look at an example of the old stacktrace printing and compare it to the new one:
+
+**Old stacktrace**:
+
+![Stacktrace in 1.0](/assets/blog/2021-1.6-highlights/julia-1.0-stacktrace.png)
+
+
+**New stacktrace**:
+
+![Stacktrace in 1.6](/assets/blog/2021-1.6-highlights/julia-1.6-stacktrace.png)
+
+-------------------------
+
+Some improvements are worth pointing out explicitly:
+
+- Argument names in methods are now shown.
+- The function name is now emphasized more strongly than the surrounding text, since that information tends to be the most important.
+- The module where the method is defined is now shown and the modules are also color coded.
+- Paths to the method were de-emphasized since they usually have lower importance.
+- Paths were made shorter by showing `~` instead of the full path to the home directory.
+
+
+## Conclusion
+
+Please enjoy the release, and as always [let us know](https://github.com/JuliaLang/julia/issues)
+if you encounter any problems or have any suggestions.
+We hope to be back in about four months to report on even more progress in version 1.7!