<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<script src="https://www.w3.org/Tools/respec/respec-w3c" defer class="remove"></script>
<title>Privacy Principles</title>
<script class="remove">
// All config options at https://respec.org/docs/
var respecConfig = {
specStatus: 'ED',
group: 'tag',
format: 'markdown',
editors: [{
name: 'Robin Berjon',
company: 'Protocol Labs',
companyURL: 'https://protocol.ai/',
url: 'https://berjon.com/',
note: 'The New York Times until Sep 2022',
w3cid: 34327,
}, {
name: 'Jeffrey Yasskin',
company: 'Google',
companyURL: 'https://google.com/',
w3cid: 72192,
}],
github: 'w3ctag/privacy-principles',
latestVersion: 'https://www.w3.org/TR/privacy-principles/',
shortName: 'privacy-principles',
lint: {
'required-sections': false,
},
preProcess: [
function checkAudiences(_conf, _doc, utils) {
// Ensure every principle has a data-audiences attribute with a
// space-separated list of the target audiences of that principle.
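// Illustrative sketch of the markup this check expects (hypothetical example;
// the exact wrapper structure is an assumption based on the selectors used below):
//   <div class="practice" data-audiences="websites user-agents">
//     <p><span class="practicelab" id="principle-example">Principle text…</span></p>
//   </div>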
for (const practice of document.querySelectorAll("div.practice")) {
const audiences = practice.dataset.audiences?.split(/\s+/) ?? [];
if (audiences.length === 0) {
utils.showError(
`Missing data-audiences attribute in principle.`,
{
title: "Missing data-audiences attribute.",
elements: [practice],
hint: 'Set it to a space-separated list of "websites", "user-agents", or "api-designers".',
}
);
continue;
}
const unknownAudiences = audiences.filter(
(audience) =>
!["websites", "user-agents", "api-designers"].includes(audience)
);
if (unknownAudiences.length > 0) {
const list = new Intl.ListFormat().format(unknownAudiences);
utils.showError(
`Unknown audience "${list}" in principle.`,
{
title: "Unknown audience in data-audience attribute.",
elements: [practice],
hint: `Remove ${list}.`,
}
);
}
}
}
],
postProcess: [
() => {
// Remove the nested Best Practices Summary header.
document.querySelector('#best-practices-summary > div.header-wrapper').remove();
const bpSummarySection = document.querySelector('#best-practices-summary');
bpSummarySection.replaceWith(...bpSummarySection.children);
document.querySelector('ol:not(:has(ol)):has(a[href="#best-practices-summary"])').remove();
// Renumber principles by their sections.
const principleSections = new Set();
for (let label of document.querySelectorAll('div.practice > a.marker > bdi')) {
principleSections.add(label.closest('section:has(bdi.secno)'));
}
for (const section of principleSections) {
const principleLabels = section.querySelectorAll('div.practice > a.marker > bdi');
let index = 1;
for (const label of principleLabels) {
let labelText = `Principle ${section.querySelector('bdi.secno').textContent.trim()}`;
if (principleLabels.length > 1) {
labelText += `.${index}`;
index++;
}
label.textContent = labelText;
const linkTarget = label.closest('div').querySelector('span.practicelab').id;
document.querySelector(`#bp-summary > ul > li > a.marker[href='#${linkTarget}'] > bdi`)
.textContent = labelText;
}
}
// Remove empty <p>s.
for (let p of document.querySelectorAll('div.practice > p')) {
if (/^\s*$/.test(p.textContent)) p.remove();
}
// Add audience chips.
const audienceNames = {
websites: 'websites',
'user-agents': 'user agents',
'api-designers': 'API designers',
};
for (let prac of document.querySelectorAll('div.practice.advisement')) {
prac.classList.remove('advisement');
prac.classList.add('principle');
const anchor = prac.querySelector('span.practicelab');
const summary = document.querySelector(`#bp-summary a.marker.self-link[href="#${anchor.id}"]`);
if (!summary) console.error(`No summary for`, anchor);
const audiences = prac.dataset.audiences?.split(/\s+/) ?? [];
const audDiv = document.createElement('div');
audDiv.setAttribute('class', 'audience-label');
const audSum = document.createElement('span');
audSum.setAttribute('class', 'audience-label');
audiences.forEach(lbl => {
const spn = document.createElement('span');
spn.setAttribute('class', `audience-${lbl}`);
spn.textContent = audienceNames[lbl];
audDiv.append(spn);
audSum.append(spn.cloneNode(true));
if (summary) summary.parentNode.append(audSum);
});
prac.append(audDiv);
}
}
],
localBiblio: {
'ADDING-PERMISSIONS': {
title: 'Adding another permission? A guide',
authors: ['Nick Doty'],
date: '2018',
href: 'https://github.com/w3cping/adding-permissions'
},
'Addressing-Cyber-Harassment': {
title: 'Addressing cyber harassment: An overview of hate crimes in cyberspace',
authors: ['Danielle Keats Citron'],
publisher: 'Case Western Reserve Journal of Law, Technology & the Internet',
date: '2015',
href: 'https://scholarship.law.bu.edu/cgi/viewcontent.cgi?article=1634&context=faculty_scholarship'
},
'Anti-Tracking-Policy': {
title: 'Anti-Tracking Policy',
href: 'https://wiki.mozilla.org/Security/Anti_tracking_policy#Tracking_Definition',
publisher: 'Mozilla',
},
'Automating-Inequality': {
title: 'Automating Inequality: How High-Tech Tools Profile, Police, and Punish the Poor',
href: 'https://us.macmillan.com/books/9781250074317/automatinginequality',
authors: ['Virginia Eubanks'],
publisher: 'Macmillan',
},
'Beyond-Individual': {
title: 'Privacy Beyond the Individual Level (in Modern Socio-Technical Perspectives on Privacy)',
href: 'https://doi.org/10.1007/978-3-030-82786-1_6',
authors: ['J.J. Suh', 'M.J. Metzger'],
publisher: 'Springer',
},
'Big-Data-Competition': {
title: 'Big Data and Competition Policy',
href: 'https://global.oup.com/academic/product/big-data-and-competition-policy-9780198788140?lang=en&cc=us',
authors: ['Maurice E. Stucke', 'Allen P. Grunes'],
publisher: 'Oxford University Press',
},
'Bit-By-Bit': {
title: 'Bit By Bit: Social Research in the Digital Age',
href: 'https://www.bitbybitbook.com/',
authors: ['Matt Salganik'],
publisher: 'Princeton University Press',
status: 'You can read this book free of charge, but Matt is an outstanding author and I encourage you to support him by buying his book!',
},
'Browser-Parties': {
title: 'Parties and browsers',
href: 'https://tess.oconnor.cx/2020/10/parties',
authors: ["Tess O'Connor"],
},
'CAT': {
title: 'Content Aggregation Technology (CAT)',
authors: ['Robin Berjon', 'Justin Heideman'],
href: 'https://nytimes.github.io/std-cat/',
},
'Contextual-Integrity': {
title: 'Privacy As Contextual Integrity',
authors: ['Helen Nissenbaum'],
href: 'https://digitalcommons.law.uw.edu/wlr/vol79/iss1/10/',
publisher: 'Washington Law Review',
},
'Confiding': {
title: 'Confiding in Con Men: U.S. Privacy Law, the GDPR, and Information Fiduciaries',
authors: ['Lindsey Barrett'],
href: 'https://papers.ssrn.com/sol3/papers.cfm?abstract_id=3354129',
},
'Consent-Lackeys': {
title: 'Publishers tell Google: We\'re not your consent lackeys',
authors: ['Rebecca Hill'],
href: 'https://www.theregister.com/2018/05/01/publishers_slam_google_ad_policy_gdpr_consent/',
publisher: 'The Register',
},
'Convention-108': {
title: 'Convention for the Protection of Individuals with regard to Automatic Processing of Personal Data',
href: 'https://rm.coe.int/1680078b37',
publisher: 'Council of Europe',
},
'Dark-Patterns': {
title: 'Dark patterns: past, present, and future',
authors: ['Arvind Narayanan', 'Arunesh Mathur', 'Marshini Chetty', 'Mihir Kshirsagar'],
href: 'https://dl.acm.org/doi/10.1145/3397884',
publisher: 'ACM',
},
'Dark-Pattern-Dark': {
title: 'What Makes a Dark Pattern… Dark? Design Attributes, Normative Considerations, and Measurement Methods',
authors: ['Arunesh Mathur', 'Jonathan Mayer', 'Mihir Kshirsagar'],
href: 'https://arxiv.org/abs/2101.04843v1',
},
'Data-Futures-Glossary': {
title: 'Data Futures Lab Glossary',
authors: ['Mozilla Insights'],
href: 'https://foundation.mozilla.org/en/data-futures-lab/data-for-empowerment/data-futures-lab-glossary/',
publisher: 'Mozilla Foundation',
},
'Data-Minimization': {
title: 'Data Minimization in Web APIs',
authors: ['Daniel Appelquist'],
href: 'https://www.w3.org/2001/tag/doc/APIMinimization-20100605.html',
publisher: 'W3C TAG',
status: 'Draft Finding',
},
'De-identification-Privacy-Act': {
title: 'De-identification and the Privacy Act',
authors: ['Office of the Australian Information Commissioner'],
href: 'https://www.oaic.gov.au/privacy/guidance-and-advice/de-identification-and-the-privacy-act',
publisher: 'Australian Government',
},
'Digital-Assistant-Trust': {
title: "Facebook's new digital assistant 'M' will need to earn your trust",
authors: ['Neil Richards', 'Woodrow Hartzog'],
href: 'https://www.theguardian.com/technology/2015/sep/09/what-should-we-demand-of-facebooks-new-digital-assistant',
publisher: 'The Guardian',
},
'Digital-Market-Manipulation': {
title: 'Digital Market Manipulation',
authors: ['Ryan Calo'],
href: 'https://papers.ssrn.com/sol3/papers.cfm?abstract_id=2309703',
publisher: 'George Washington Law Review',
},
'Eurobarometer-443': {
title: 'Eurobarometer 443: e-Privacy',
authors: ['European Commission'],
href: 'https://ec.europa.eu/COMMFrontOffice/publicopinion/index.cfm/Survey/getSurveyDetail/instruments/FLASH/surveyKy/2124',
},
'Fiduciary-Law': {
title: 'Fiduciary Law',
href: 'http://www.bu.edu/lawlibrary/facultypublications/PDFs/Frankel/Fiduciary%20Law.pdf',
authors: ['Tamar Frankel'],
date: 'May 1983',
publisher: 'California Law Review',
},
'Fiduciary-Model': {
title: 'The Fiduciary Model of Privacy',
href: 'https://ssrn.com/abstract=3700087',
authors: ['Jack M. Balkin'],
date: '26 September 2020',
publisher: 'Harvard Law Review Forum',
},
'Fiduciary-UA': {
title: 'The Fiduciary Duties of User Agents',
href: 'https://papers.ssrn.com/sol3/papers.cfm?abstract_id=3827421',
authors: ['Robin Berjon'],
},
'FIP': {
title: 'Fair Information Practices: A Basic History',
href: 'http://bobgellman.com/rg-docs/rg-FIPShistory.pdf',
authors: ['Bob Gellman'],
status: '(PDF)',
},
'For-Everyone': {
title: 'This Is For Everyone',
href: 'https://twitter.com/timberners_lee/status/228960085672599552',
authors: ['Tim Berners-Lee'],
status: 'Statement made to the London 2012 Olympics opening ceremony',
},
'GDPR': {
title: 'General Data Protection Regulation (GDPR) / Regulation (EU) 2016/679',
href: 'https://eur-lex.europa.eu/legal-content/EN/TXT/HTML/?uri=CELEX:32016R0679&from=EN',
authors: ['European Parliament and Council of European Union'],
},
'GKC-Privacy': {
title: 'Governing Privacy in Knowledge Commons',
authors: ['Madelyn Rose Sanfilippo', 'Brett M. Frischmann', 'Katherine J. Strandburg'],
href: 'https://www.cambridge.org/core/books/governing-privacy-in-knowledge-commons/FA569455669E2CECA25DF0244C62C1A1',
publisher: 'Cambridge University Press',
},
'IAD': {
title: 'Understanding Institutional Diversity',
authors: ['Elinor Ostrom'],
href: 'https://press.princeton.edu/books/paperback/9780691122380/understanding-institutional-diversity',
publisher: 'Princeton University Press',
},
'Individual-Group-Privacy': {
title: 'From Individual to Group Privacy in Big Data Analytics',
authors: ['Brent Mittelstadt'],
href: 'https://link.springer.com/article/10.1007/s13347-017-0253-7',
publisher: 'Philosophy & Technology',
},
'Industry-Unbound': {
title: 'Industry Unbound: the inside story of privacy, data, and corporate power',
authors: ['Ari Ezra Waldman'],
href: 'https://www.cambridge.org/core/books/industry-unbound/787989F90DBFC08E47546178A7AB04F7',
publisher: 'Cambridge University Press',
},
'Internet-of-Garbage': {
title: 'The Internet of Garbage',
authors: ['Sarah Jeong'],
publisher: 'The Verge',
date: '2018',
href: 'https://www.theverge.com/2018/8/28/17777330/internet-of-garbage-book-sarah-jeong-online-harassment'
},
'Lost-In-Crowd': {
title: 'Why You Can No Longer Get Lost in the Crowd',
authors: ['Woodrow Hartzog', 'Evan Selinger'],
href: 'https://www.nytimes.com/2019/04/17/opinion/data-privacy.html',
publisher: 'The New York Times',
},
'Nav-Tracking': {
title: 'Navigational-Tracking Mitigations',
authors: ['Pete Snyder', 'Jeffrey Yasskin'],
href: 'https://privacycg.github.io/nav-tracking-mitigations/',
publisher: 'W3C',
},
'New-Chicago-School': {
title: 'The New Chicago School',
href: "https://www.docdroid.net/i3pUJof/lawrence-lessig-the-new-chicago-school-1998.pdf",
authors: ['Lawrence Lessig'],
publisher: "The Journal of Legal Studies",
date: "June 1998",
doi: "10.1086/468039",
},
'NIST-800-63A': {
title: 'Digital Identity Guidelines: Enrollment and Identity Proofing Requirements',
href: 'https://pages.nist.gov/800-63-3/sp800-63a.html',
publisher: 'NIST',
authors: ['Paul A. Grassi', 'James L. Fenton', 'Naomi B. Lefkovitz', 'Jamie M. Danker', 'Yee-Yin Choong', 'Kristen K. Greene', 'Mary F. Theofanos'],
date: 'March 2020'
},
'NYT-Privacy': {
title: 'How The New York Times Thinks About Your Privacy',
authors: ['Robin Berjon'],
href: 'https://open.nytimes.com/how-the-new-york-times-thinks-about-your-privacy-bc07d2171531',
publisher: 'NYT Open',
},
'Obfuscation': {
title: 'Obfuscation: A User\'s Guide for Privacy and Protest',
authors: ['Finn Brunton', 'Helen Nissenbaum'],
href: 'https://www.penguinrandomhouse.com/books/657301/obfuscation-by-finn-brunton-and-helen-nissenbaum/',
publisher: 'Penguin Random House',
},
'Obscurity-By-Design': {
title: 'Obscurity by Design',
authors: ['Woodrow Hartzog', 'Frederic Stutzman'],
href: 'https://papers.ssrn.com/sol3/papers.cfm?abstract_id=2284583',
},
'OECD-Guidelines': {
title: 'OECD Guidelines on the Protection of Privacy and Transborder Flows of Personal Data',
href: 'https://doi.org/10.1787/9789264196391-en',
date: '2002',
publisher: 'OECD Publishing',
},
'PEN-Harassment': {
href: 'https://onlineharassmentfieldmanual.pen.org/defining-online-harassment-a-glossary-of-terms/',
title: 'Online Harassment Field Manual',
publisher: 'PEN America',
},
'PEW-Harassment': {
title: 'The State of Online Harassment',
publisher: 'Pew Research Center',
date: 'January 2021',
href: 'https://www.pewresearch.org/internet/2021/01/13/the-state-of-online-harassment/'
},
'Phone-On-Feminism': {
title: 'This is your phone on feminism',
href: 'https://conversationalist.org/2019/09/13/feminism-explains-our-toxic-relationships-with-our-smartphones/',
authors: ['Maria Farrell'],
publisher: 'The Conversationalist',
rawDate: '2019-09-13',
},
'Portability-Threat-Model': {
title: 'User Data Portability Threat Model',
authors: ['Lisa Dusseault'],
href: 'https://dtinit.org/assets/ThreatModel.pdf',
publisher: 'Data Transfer Initiative',
},
'Privacy-Behavior': {
title: 'Privacy and Human Behavior in the Age of Information',
authors: ['Alessandro Acquisti', 'Laura Brandimarte', 'George Loewenstein'],
href: 'https://www.heinz.cmu.edu/~acquisti/papers/AcquistiBrandimarteLoewenstein-S-2015.pdf',
publisher: 'Science',
},
'Privacy-Concerned': {
title: 'Americans and Privacy: Concerned, Confused and Feeling Lack of Control Over Their Personal Information',
authors: ['Brooke Auxier', 'Lee Rainie', 'Monica Anderson', 'Andrew Perrin', 'Madhu Kumar', 'Erica Turner'],
href: 'https://www.pewresearch.org/internet/2019/11/15/americans-and-privacy-concerned-confused-and-feeling-lack-of-control-over-their-personal-information/',
publisher: 'Pew Research Center',
},
'Privacy-Contested': {
title: 'Privacy is an essentially contested concept: a multi-dimensional analytic for mapping privacy',
authors: ['Deirdre K. Mulligan', 'Colin Koopman', 'Nick Doty'],
href: 'https://www.ncbi.nlm.nih.gov/pmc/articles/PMC5124066/',
publisher: 'Philosophical Transactions A',
},
'Privacy-Harms': {
title: 'Privacy Harms',
authors: ['Danielle Keats Citron', 'Daniel Solove'],
href: 'https://papers.ssrn.com/sol3/papers.cfm?abstract_id=3782222',
},
'Privacy-In-Context': {
title: 'Privacy in Context',
authors: ['Helen Nissenbaum'],
href: 'https://www.sup.org/books/title/?id=8862',
publisher: 'SUP',
},
'Privacy-Is-Power': {
title: 'Privacy Is Power',
authors: ['Carissa Véliz'],
href: 'https://www.penguin.com.au/books/privacy-is-power-9781787634046',
publisher: 'Bantam Press',
},
'Privacy-Threat': {
title: 'Target Privacy Threat Model',
href: 'https://w3cping.github.io/privacy-threat-model/',
authors: ['Jeffrey Yasskin', 'Tom Lowenthal'],
publisher: 'W3C PING',
},
'PSL-Problems': {
authors: ['Ryan Sleevi'],
href: 'https://github.com/sleevi/psl-problems',
title: 'Public Suffix List Problems'
},
'Relational-Turn': {
title: 'A Relational Turn for Data Protection?',
href: 'https://papers.ssrn.com/sol3/papers.cfm?abstract_id=3745973&s=09',
authors: ['Neil Richards', 'Woodrow Hartzog'],
},
'Seeing-Like-A-State': {
title: 'Seeing Like a State: How Certain Schemes to Improve the Human Condition Have Failed',
href: 'https://bookshop.org/books/seeing-like-a-state-how-certain-schemes-to-improve-the-human-condition-have-failed/9780300246759',
authors: ['James C. Scott'],
},
'SILVERPUSH': {
title: 'How TV ads silently ping commands to phones: Sneaky SilverPush code reverse-engineered',
href: 'https://www.theregister.com/2015/11/20/silverpush_soundwave_ad_tracker/',
publisher: 'The Register',
authors: ['Iain Thomson']
},
'Strava-Debacle': {
title: 'The Latest Data Privacy Debacle',
authors: ['Zeynep Tufekci'],
href: 'https://www.nytimes.com/2018/01/30/opinion/strava-privacy.html',
publisher: 'The New York Times',
},
'Standard-Bodies-Regulators': {
title: 'Technical Standards Bodies are Regulators',
authors: ['Mark Nottingham'],
href: 'https://www.mnot.net/blog/2023/11/01/regulators',
},
'Strava-Reveal-Military': {
title: 'Strava Fitness App Can Reveal Military Sites, Analysts Say',
authors: ['Richard Pérez-Peña', 'Matthew Rosenberg'],
href: 'https://www.nytimes.com/2018/01/29/world/middleeast/strava-heat-map.html',
publisher: 'The New York Times',
},
'Surveillance-Capitalism': {
title: 'The Age of Surveillance Capitalism: The Fight for a Human Future at the New Frontier of Power',
authors: ['Shoshana Zuboff'],
href: 'https://www.publicaffairsbooks.com/titles/shoshana-zuboff/the-age-of-surveillance-capitalism/9781610395694/',
publisher: 'Hachette Public Affairs',
},
'Taking-Trust-Seriously': {
title: 'Taking Trust Seriously in Privacy Law',
href: 'https://papers.ssrn.com/sol3/papers.cfm?abstract_id=2655719',
authors: ['Neil Richards', 'Woodrow Hartzog'],
},
'Twitter-Developer-Policy': {
title: 'Developer Policy - Twitter Developers',
href: 'https://developer.twitter.com/en/developer-terms/policy',
publisher: 'Twitter'
},
'Tracking-Prevention-Policy': {
title: 'Tracking Prevention Policy',
href: 'https://webkit.org/tracking-prevention-policy/',
publisher: 'Apple',
},
'Understanding-Privacy': {
title: 'Understanding Privacy',
authors: ['Daniel Solove'],
href: 'https://www.hup.harvard.edu/catalog.php?isbn=9780674035072',
publisher: 'Harvard University Press',
},
'Why-Privacy': {
title: 'Why Privacy Matters',
authors: ['Neil Richards'],
href: 'https://global.oup.com/academic/product/why-privacy-matters-9780190939045?cc=us&lang=en&',
publisher: 'Oxford University Press',
},
'Records-Computers-Rights': {
title: 'Records, Computers and the Rights of Citizens',
publisher: 'U.S. Department of Health, Education & Welfare',
href: 'https://archive.epic.org/privacy/hew1973report/'
},
'Relational-Governance': {
title: 'A Relational Theory of Data Governance',
authors: ['Salomé Viljoen'],
href: 'https://www.yalelawjournal.org/feature/a-relational-theory-of-data-governance',
publisher: 'Yale Law Journal',
},
'web-without-3p-cookies': {
title: 'Improving the web without third-party cookies',
authors: ['Amy Guy'],
href: 'https://www.w3.org/2001/tag/doc/web-without-3p-cookies/',
publisher: 'W3C',
},
},
};
</script>
<style>
.principle {
border: .5em;
border-color: cornflowerblue;
border-style: none none none double;
background: transparent;
padding: .5em;
page-break-inside: avoid;
margin: 1em auto;
}
.principle > .marker {
color: cornflowerblue;
font-weight: bold;
}
q {
font-style: italic;
}
.audience-label {
font-size: 0.9em;
}
.audience-label > span {
display: inline-block;
padding: 0.1em 0.4em;
margin: 0 0.2em;
border-radius: 4px;
}
#bp-summary .audience-label > span {
padding: 0 0.4em;
}
.audience-websites { background-color: gold; }
.audience-user-agents { background-color: mediumspringgreen; }
.audience-api-designers { background-color: mistyrose; }
ul:has(#include-websites) li { list-style:none }
/* Show summary principles if their audience is selected. */
#bp-summary li:has(.practicelab) { display: none }
#bp-summary ul:has(#include-websites:checked) + ul li:has(.audience-websites) { display: list-item; }
#bp-summary ul:has(#include-user-agents:checked) + ul li:has(.audience-user-agents) { display: list-item; }
#bp-summary ul:has(#include-api-designers:checked) + ul li:has(.audience-api-designers) { display: list-item; }
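/* Note: the `+ ul` combinator above assumes the generated summary list is
   inserted immediately after the checkbox list; if ReSpec changes that
   structure, these rules stop matching. */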
</style>
</head>
<body data-cite="html indexedDB service-workers fingerprinting-guidance url infra">
<section id="abstract">
Privacy is an essential part of the web. This document provides definitions
for privacy and related concepts that are applicable worldwide as well as a set of privacy
principles that should guide the development of the web as a trustworthy platform. People using
the web would benefit from a stronger relationship between technology and policy, and this
document is written to work with both.
</section>
<section id="sotd">
This document is a Draft Finding of the [Technical Architecture Group (TAG)](https://www.w3.org/2001/tag/)
which we are releasing as a Draft Note. The intent is for this document to become a W3C Statement.
It was prepared by the [Web Privacy Principles Task Force](https://github.com/w3ctag/privacy-principles),
which was convened by the TAG. Publication as a Draft Finding or Draft Note does not imply
endorsement by the TAG or by the W3C Membership.
The substance of this draft reflects the consensus of the TAG, but it is subject to ongoing
editorial work and restructuring. Please bear this in mind when citing or linking
to this document, as section numbers and headings may change.
This document is considered stable by the TAG and is ready for wide review.
</section>
<section class="introductory">
## How This Document Fits In
This document elaborates on the <a data-cite="ethical-web-principles#privacy">privacy principle</a>
from the [[[ethical-web-principles]]]: "Security and privacy are essential." While it focuses on privacy, this should
not be taken as an indication that privacy is always more important than other ethical web principles, and
this document doesn't address how to balance the different ethical web principles if they come into conflict.
Privacy on the web is primarily regulated by two forces: the architectural capabilities that the web
platform exposes (or does not expose), and laws in the various jurisdictions where the web is used
([[New-Chicago-School]], [[Standard-Bodies-Regulators]]). These regulatory mechanisms are separate; a law in one country does not
(and should not) change the architecture of the whole web, and likewise web specifications cannot
override any given law (although they can affect how easy it is to create and enforce law). The web
is not merely an implementation of a particular legal privacy regime; it has distinct features and
guarantees driven by shared values that often exceed legal requirements for privacy.
However, the overall goal of privacy on the web is served best when technology and law complement
each other. This document seeks to establish shared concepts as an aid to technical efforts to
regulate privacy on the web. It may also be useful in pursuing alignment with and between legal
regulatory regimes.
Our goal for this document is not to cover all possible privacy issues, but rather to provide enough
background to support the web community in making informed decisions about privacy and in weaving
privacy into the architecture of the web.
Few architectural principles are absolute, and privacy is no exception: privacy can come into tension
with other desirable properties of an ethical architecture, including accessibility or internationalization,
and when that happens the web community will have to work together to strike the right balance.
</section>
<section class="introductory">
## Audiences for this Document {#audience}
The primary audiences for this document are
* browser developers,
* authors of web specifications,
* reviewers of web specifications, and
* web developers.
Additional audiences include:
* policy makers and
* operators of privacy-related services.
This document is intended to help its audiences address privacy concerns as early as possible in the life
cycle of a new web standard or feature, or in the development of web products. Beginning with privacy in mind
helps avoid the need to add special cases later to address unforeseen but predictable issues, and reduces
the risk of building systems that turn out to be unacceptable to users.
Because this document guides privacy reviews of new standards, authors of web
specifications should consult it early in the design to make sure their feature
passes the review smoothly.
</section>
<section class="introductory" id="bp-summary">
## List of Principles {#principle-list}
This section is a list of all the privacy principles,
with links to their longer explanations in the rest of the document.
Which audiences should be included?
* <label><input type="checkbox" checked id="include-websites"> Websites</label>
* <label><input type="checkbox" checked id="include-user-agents"> User Agents</label>
* <label><input type="checkbox" checked id="include-api-designers"> API Designers</label>
</section>
# An Introduction to Privacy on the Web {#intro}
This document contains technical guidelines. However, to put those guidelines in context, we
must first define some terms and explain what we mean by privacy.
The web is a social and technical system made up of [=information flows=]. Because this document
is specifically about [=privacy=] as it applies to the web, it focuses on privacy with respect to
information flows.
The web is for everyone ([[?For-Everyone]]). It should be "<i>a platform that helps people and provides a
net positive social benefit</i>" ([[?ethical-web-principles]]). One of the ways in which the
web serves people is by seeking to protect them from surveillance and the types of manipulation that data can
enable.
Information can be used to predict and to influence people, as well as to design online
spaces that control people's behaviour. The collection and [=processing=] of information in greater
volume, with greater precision and reliability, with increasing interoperability across a growing
variety of data types, and at intensifying speed is leading to a concentration of power that threatens
private and public liberties. What's more, automation and the increasing computerisation of all aspects
of our lives both increase the power of information and decrease the cost of a number of intrusive
behaviours that would be more easily kept in check if the perpetrator had to be in the same room as
the victim.
When an [=actor=] can collect [=data=] about a [=person=] and process it automatically, and that
[=person=] has to take manual action to protect their [=data=] or control its processing, this <dfn>automation asymmetry</dfn>
creates an imbalance of power that favors that [=actor=] and decreases the [=person=]'s agency.
This document focuses on the impact that [=data=] [=processing=] can have on people, but it can also
impact other [=actors=], such as companies or governments.
It is important to keep in mind that not all people are equal in how they can resist
an imbalance of power: some [=people=] are more [=vulnerable=] and therefore in greater
need of protection.
<dfn data-lt="governance">Data governance</dfn> is the system of principles that regulate [=information flows=].
[=Data governance=] determines
which [=actors=] can collect [=data=], what data they can collect, how they can collect it, and how they can [=process=] it
([[?GKC-Privacy]], [[?IAD]]). This document provides building blocks for [=data governance=]
that puts [=people=] first.
Principles vary from [=context=] to [=context=] ([[?Understanding-Privacy]], [[?Contextual-Integrity]]).
For instance, people have different expectations of [=privacy=] at work, at a café, or at home. Understanding and
evaluating a privacy situation is best done by clearly identifying:
* Its [=actors=], which include the subject of the information as well as the sender and the recipient
of the [=information flow=]. (Note that recipients might not always want to be recipients.)
* The type of data involved in the [=information flow=].
* The principles that are in use in this context.
There are <em>always</em> privacy principles at work. Some sets of principles may be more
permissive, but that does not make them neutral. All privacy principles have an impact on
[=people=] and we must therefore determine which principles best align with ethical web values in
web [=contexts=] ([[?ethical-web-principles]], [[?Why-Privacy]]).
<dfn>Information flows</dfn> are information exchanged or processed by
[=actors=]. A person's privacy can be harmed both by their information flowing from them to
other actors and by information flowing toward them. Examples of the latter include:
unexpected shocking images,
loud noises while they intend to sleep, manipulative information, interruptive
messages when their focus is on something else, or harassment when they seek social interactions.
(In some of these cases, the information may not be [=personal data=].)
On the web, [=information flows=] may involve a wide variety of [=actors=] that are not always
recognizable or obvious to a user within a particular interaction. Visiting a website may involve
the actors that contribute to operating that site, but also actors with network access,
which may include: Internet service providers; other network operators; local institutions providing
a network connection including schools, libraries, or universities; government intelligence services;
malicious hackers who have gained access to the network or the systems of any of the other actors.
High-level threats including surveillance may be pursued by these actors ([[RFC6973]]). Pervasive monitoring,
a form of large-scale, indiscriminate surveillance, is a known attack on the privacy of users of the
internet and the web [[RFC7258]].
Information flows may also involve other people — for example, other users of a site —
which could include friends, family members, teachers, strangers, or government officials. Some
threats to privacy, including both disclosure and harassment, may be particular to the other
people involved in the information flow ([[RFC6973]]).
## Individual Autonomy {#autonomy}
A [=person=]'s <dfn data-lt="autonomous">autonomy</dfn> is their ability to make decisions of their own personal will,
without undue influence from other [=actors=]. People have limited intellectual resources and
time with which to weigh decisions, and they have to rely on shortcuts when making decisions. This makes it possible
to manipulate their preferences, including their privacy preferences ([[?Privacy-Behavior]], [[?Digital-Market-Manipulation]]).
A [=person=]'s [=autonomy=] is improved by a system when that system offers a shortcut that is closer to what
that [=person=] would have decided given unlimited time and intellectual ability. [=Autonomy=] is decreased
when a similar shortcut goes against decisions made under these ideal conditions.
Affordances and interactions that decrease [=autonomy=] are known as <dfn data-lt="dark pattern|dark patterns">deceptive patterns</dfn> (or dark patterns).
A [=deceptive pattern=] does not have to be intentional ([[?Dark-Patterns]], [[?Dark-Pattern-Dark]]).
When building something that may impact people's [=autonomy=], it is important that reviewers
from multiple independent perspectives check that it does not introduce [=deceptive patterns=].
Given the large volume of potential [=data=]-related decisions in today's data economy,
it is impossible for people to have detailed control over how their data is processed.
This fact does not imply that privacy is dead. Studies show that
[=people=] remain concerned over how their [=data=] is [=processed=], that they feel powerless,
and sense that they have lost agency ([[?Privacy-Concerned]]). If we design our technological infrastructure
carefully, we can give people greater [=autonomy=] with respect to their own [=data=]. This is
done by setting [=appropriate=], privacy-protective defaults and designing user-friendly choice
architectures.
### Opt-in, Consent, Opt-out, Global Controls {#opt-in-out}
Several kinds of mechanisms exist to enable [=people=] to control how they interact
with data-processing systems. Mechanisms that increase the number of [=purposes=] for which
their [=data=] is being [=processed=] or the amount of their [=data=] that is [=processed=]
are referred to as [=opt-in=] or <dfn data-lt="opt in|opt-in">consent</dfn>. Mechanisms
that decrease this number of [=purposes=] or the amount of [=data=] being [=processed=] are known as
<dfn data-lt="opt out">opt-out</dfn>.
When deployed thoughtfully, these mechanisms can improve [=people=]'s [=autonomy=]. Often,
however, they are used as a way to avoid putting in the difficult work of deciding which
types of [=processing=] are [=appropriate=] and which are not, offloading [=privacy labor=]
to the people using a system.
[=People=] should be able to [=consent=] to data sharing that would
otherwise be restricted, such as granting access to their pictures or geolocation.
[=Actors=] need to take care that their users are [*informed*](#consent-principles) when
granting this [=consent=] and *aware* enough about what's going on that they can know to
revoke their consent when they want to.
[=Consent=] to data processing and granting permissions to access web platform APIs are
similar problems. Both consent and permissions should be requested in a way that lets
people delay or avoid answering if they're trying to do something else. If the user
grants some form of persistent access to data, there should be an indicator that lets
people notice this ongoing access and that lets them turn it off whenever they wish to.
In general, providing [=consent=] should be rare, intentional, and temporary.
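<aside class="example" id="example-deferred-permission" title="Asking for a permission only when it is needed">
A minimal, illustrative sketch of how a site might follow this principle using the standard
Permissions and Geolocation APIs. The <code>#find-stores</code> element and the
<code>showNearbyStores</code> and <code>showManualLocationEntry</code> functions are hypothetical site code.
<pre>
// Ask for location only in response to a user action, never on page load.
document.querySelector('#find-stores').addEventListener('click', async () => {
  const status = await navigator.permissions.query({ name: 'geolocation' });
  if (status.state === 'denied') {
    showManualLocationEntry(); // hypothetical fallback that avoids re-prompting
    return;
  }
  navigator.geolocation.getCurrentPosition(showNearbyStores, showManualLocationEntry);
  // If the person later revokes the permission, adapt instead of nagging them.
  status.onchange = () => {
    if (status.state !== 'granted') showManualLocationEntry();
  };
});
</pre>
</aside>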
When an [=opt-out=] mechanism exists, it should preferably work with a
<dfn>global opt-out</dfn> mechanism. Conceptually, a [=global opt-out=] mechanism is an
automaton operating as part of the [=user agent=]. It is equivalent to a robot that would carry
out a [=person=]'s instructions by pressing an [=opt-out=] button (or a similar expression of
the [=person=]'s rights) with every interaction that the [=person=] has with a site. (For
instance, the [=person=] may be objecting to [=processing=] based on legitimate interest,
withdrawing [=consent=] to specific [=purposes=], or requesting that their data not be sold or
shared.) The [=user=] is effectively delegating the expression of their [=opt-out=] to their
[=user agent=], which helps rectify [=automation asymmetry=]. The [[[?gpc-spec]]] is a good
example of a [=global opt-out=] mechanism.
Under this model, a [=global opt-out=] signal should not be understood as a decision that a
[=person=] made a while ago when they flipped a setting or chose to use a specific
[=user agent=] but rather as a preference that they have chosen to automatically reaffirm with
every interaction with the site.
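<aside class="example" id="example-gpc-check" title="Honoring a global opt-out signal">
A minimal, illustrative sketch of how a site's script might honor such a signal, assuming the
<code>navigator.globalPrivacyControl</code> boolean and the <code>Sec-GPC</code> request header
described in the [[[?gpc-spec]]] proposal; the <code>disableSaleAndSharing</code> function is
hypothetical site-specific code.
<pre>
// Servers can equivalently read the Sec-GPC request header on each request.
if (navigator.globalPrivacyControl === true) {
  // Treat the signal as the person reaffirming their opt-out with this
  // interaction, not as a one-time setting flipped long ago.
  disableSaleAndSharing(); // hypothetical site-specific function
}
</pre>
</aside>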
One implementation strategy for [=opt-outs=] or other <a href="#data-rights">data rights</a> is
to assign [=people=] stable [=identifiers=] and to maintain a central registry to map these
[=identifiers=] to [=people=]'s preferences. [=Actors=] that wish to process a given person's
data are then expected to fetch that person's preferences from the central registry and to
configure their processing accordingly. This approach has notably been deployed to capture
[=opt-outs=] of marketing uses of people's phone numbers or residential addresses. This
approach is not recommended, for multiple reasons: it offers no technical protection against
bad actors, it creates one central point of failure, it is hard to meaningfully audit (particularly
for the scale of processing implied by web systems), and experience with existing systems
shows that they make it hard for [=people=] to exercise their rights.
### Privacy Labor {#privacy-labor}
<dfn data-lt="labor">Privacy labor</dfn> is the practice of having a [=person=] do
the work of ensuring [=data processing=] of which they are the subject or recipient is
[=appropriate=], instead of putting the responsibility on the [=actors=] who are doing the processing.
Data systems that are based on asking [=people=] for their [=consent=] tend to increase
[=privacy labor=].
More generally, implementations of [=privacy=] often offload [=labor=] to [=people=]. This is
notably true of the regimes descended from the <dfn data-lt="FIPs">Fair Information Practices</dfn>
([=FIPs=]), a loose set of principles initially elaborated in the 1970s in support of individual
[=autonomy=] in the face of growing concerns with databases. The [=FIPs=] generally assume that
there is sufficiently little [=data processing=] taking place that any [=person=] will be able to
carry out sufficient diligence to be [=autonomous=] in their decision-making. Since they offload
the [=privacy labor=] to people and assume perfect, unlimited [=autonomy=], the [=FIPs=] do not
forbid specific types of [=data processing=] but only place them under different procedural
requirements. This approach is no longer [=appropriate=].
One notable issue with procedural approaches to privacy is that they tend to have the same
requirements in situations where people find themselves in a significant asymmetry of
power with another [=actor=] — for instance a [=person=] using an essential service provided by a
monopolistic platform — and those where a person and the other [=actor=] are very much on equal
footing, or even where the [=person=] may have greater power, as is the case with small
businesses operating in a competitive environment. They also do not consider cases in
which one [=actor=] may coerce other [=actors=] into facilitating its [=inappropriate=]
practices, as is often the case with dominant players in advertising or in content aggregation
([[?Consent-Lackeys]], [[?CAT]]).
The [=FIPs=] are still invoked to this day, often under the label "<i>transparency
and choice</i>", which, in today's digital environment, is often an indication that
[=inappropriate=] [=processing=] is being described.
## Vulnerability {#vulnerability}
Sometimes particular groups of people, such as children or the elderly,
are classified as [=vulnerable people=]. However, any [=person=] could be vulnerable in
one or more contexts, sometimes without realizing it.
A [=person=] may not realise when they disclose personal data that
they are vulnerable or could become vulnerable, and an [=actor=] may have
no way of knowing that a person is vulnerable.
System designers should consider this in their system designs.
Some individuals may be more vulnerable to privacy risks or harm as a result of
collection, misuse, loss, or theft of personal data because:
* of their attributes, interests, opinions, or behaviour;
* of the situation or setting (e.g. where there is information asymmetry or other
power imbalances);
* they lack the capacity to fully assess the risks;
* choices are not presented in an easy-to-understand meaningful way (e.g. [=deceptive
patterns=]);
* they have not been consulted about their privacy needs and expectations;
* they have not been considered in the decisions about the design of the
product or service.
Additional privacy protections may be needed for personal data of vulnerable
people or [sensitive information](#hl-sensitive-information) which could cause
someone to become vulnerable if their personal data is collected, used, or
shared (e.g. blocking tracking elements, sensor data, or information about
installed software or connected devices).
While sometimes others can help vulnerable people assess privacy risks and
make decisions about privacy (such as parents, [=guardians=], and peers), everyone
has their own right to privacy.
### Guardians {#guardians}
Some [vulnerable people](#vulnerability) need a <dfn>guardian</dfn> to help them make good
decisions about their own web use (e.g. children, with their parents often
acting as their [=guardians=]). A person with a [=guardian=] is known as
a <dfn>ward</dfn>.
The [=ward=] has a right to make informed decisions and exercise their
autonomy regarding their right to privacy. Their [=guardian=] has an
_obligation_ to help their [=ward=] do so when the [=ward=]'s abilities aren't
sufficient, even if that conflicts with the [=guardian=]'s desires. In
practice, many [=guardians=] do not make decisions in their [=ward=]'s best
interest, and it's critical that web platform technologies do not exacerbate
the risks inherent in this situation.
[=User agents=] should balance a benevolent [=guardian=]'s need to protect
their [=ward=] from dangers, against a [=ward=]'s need to protect themself
if they have a malicious [=guardian=].
[=User agents=] can protect vulnerable [=wards=] by complying with the principles in
[[[#device-administrators]]], and may only provide information about a [=ward=]
to a [=guardian=] for the purpose of helping that [=guardian=] uphold their
responsibilities to their [=ward=]. The mechanism for doing so must include
measures to help [=wards=] who realize that their [=guardian=] isn't acting in
the [=ward=]'s interest.
<aside class="example" id="example-protective-parent" title="Protective parents">
A parent might configure a small child's [=user agent=] to block access to violent content until the
child is old enough to make their own decisions about it.
</aside>
<aside class="example" id="example-lgbt-kid" title="An LGBT child">
A child may discover that they're LGBT and need to find supportive resources online. If they have a
homophobic or transphobic parent, that parent might have configured their [=user agent=] to either
block or inform the parent when the child visits web pages about LGBT-related subjects. The [=user
agent=] needs to warn the child about how it's configured so that the child can know to ask a better
[=guardian=] for access to the help they need.
</aside>
## Collective Governance {#collective}
Privacy principles are defined through social processes and, because of that, the applicable definition
of [=privacy=] in a given context can be
contested ([[?Privacy-Contested]]). This makes privacy a problem of collective action ([[?GKC-Privacy]]).
Group-level [=data processing=] may impact populations or individuals, including in
ways that [=people=] could not control even under the optimistic assumptions of [=consent=]. For instance,
it's possible that the only thing that a person is willing to reveal to a particular actor is that they
are part of a given group. However, other members of the same group may be interacting with the same
actor and revealing a lot more information, which can enable effective statistical inferences about
people who refrain from providing information about themselves.
What we consider is therefore not just the relation between the [=people=] who share data
and the [=actors=] that invite that sharing ([[?Relational-Turn]]), but also between the [=people=]
who may find themselves categorised indirectly as part of a group even without sharing data. One key
understanding here is that such relations may persist even when data is [=de-identified=]. What's
more, such categorisation of people, voluntary or not, changes the way in which the world operates.
This can produce self-reinforcing loops that can damage both individuals and
groups ([[?Seeing-Like-A-State]]).
In general, collective issues in [=data=] require collective solutions.
Web standards help with [=data governance=] by
defining structural controls in [=user agents=],
ensuring that researchers and regulators can discover group-level abuse,
and establishing or delegating to institutions that can handle issues of [=privacy=].
[=Governance=] will often struggle to achieve its goals if it works primarily by
increasing <em>individual</em> control instead of by collective action.
Collecting data at large scales can have significant pro-social outcomes. Problems tend to
emerge when [=actors=] [=process=] [=data=]
for collective benefit and for [=disloyal=] [=purposes=] at the same time.
The [=disloyal=] [=purposes=] are often justified as bankrolling the pro-social outcomes,
but such an arrangement requires collective oversight to be [=appropriate=].
### Group Privacy {#group-privacy}
There are different ways for [=people=] to become members of a group. Either they can join it
deliberately, making it a self-constituted group such as when joining a club, or they can be
classified into it by an external actor, typically a bureaucracy or its computerised equivalent
([[?Beyond-Individual]]). In the latter case, [=people=] may not be aware that they are being
grouped together, and the definition of the group may not be intelligible (for instance if it is
created from opaque machine learning techniques).
Protecting group privacy can take place at two different levels. The existence of a group or at
least its activities may need to be protected even in cases in which its members are guaranteed to
remain anonymous. We refer to this as "group privacy." Conversely, [=people=] may wish to protect
knowledge that they are members of the group even though the existence of the group and its actions
may be well known (e.g. membership in a dissidents movement under authoritarian rule), which we call
"membership privacy". An example [=privacy violation=] for the former case
is the fitness app Strava that did not reveal individual behaviour or legal identity but published heat
maps of popular running routes. In doing so, it revealed secret US bases around which military
personnel took frequent runs ([[?Strava-Debacle]], [[?Strava-Reveal-Military]]).
People's privacy interests may also be affected when information about a small group of people is
processed, even if no individualized data is exposed. For example, browsing activity of the students
in a classroom may be sensitive even if their teacher doesn't learn exactly which student accessed a
particular resource about a health issue. Targeting presentation of information to a small group may
also be inappropriate: for example, targeting messages to people who visited a particular clinic or
are empaneled on a particular jury may be invasive even without uniquely individual data.
When [=people=] do not know that they are members of a group, when they cannot easily find other
members of the group so as to advocate for their rights together, or when they cannot easily
understand why they are being categorised into a given group, their ability to protect themselves
through self-governing approaches to privacy is largely eliminated.
One common problem in group privacy is when the actions of one member of a group reveal information
that other members would prefer were not shared in this way (or at all). For instance, one person
may publish a picture of an event in which they are featured alongside others while the other people
captured in the same picture would prefer their participation not to be disclosed. Another example
of such issues are sites that enable people to upload their contacts: the person performing the
upload might be more open to disclosing their social networks than the people they are connected to
are. Such issues do not necessarily admit simple, straightforward solutions, but they need to be
carefully considered by people building websites.