<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="en">
	<id>https://vrarwiki.com/index.php?action=history&amp;feed=atom&amp;title=Lens_array</id>
	<title>Lens array - Revision history</title>
	<link rel="self" type="application/atom+xml" href="https://vrarwiki.com/index.php?action=history&amp;feed=atom&amp;title=Lens_array"/>
	<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;action=history"/>
	<updated>2026-04-16T13:17:01Z</updated>
	<subtitle>Revision history for this page on the wiki</subtitle>
	<generator>MediaWiki 1.43.0</generator>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34997&amp;oldid=prev</id>
		<title>Xinreality at 08:50, 3 May 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34997&amp;oldid=prev"/>
		<updated>2025-05-03T08:50:32Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;table style=&quot;background-color: #fff; color: #202122;&quot; data-mw=&quot;interface&quot;&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;tr class=&quot;diff-title&quot; lang=&quot;en&quot;&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;← Older revision&lt;/td&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;Revision as of 08:50, 3 May 2025&lt;/td&gt;
				&lt;/tr&gt;&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l13&quot;&gt;Line 13:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 13:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Lenticular arrays:&amp;#039;&amp;#039;&amp;#039; Arrays of cylindrical microlenses (lenticules) arranged one-dimensionally or two-dimensionally. These produce multiple horizontal viewing zones in glasses-free 3D displays. For example, a lenticular lens array can restrict the exit pupil to certain angles, enabling light-field panels that show different images to each eye.&amp;lt;ref name=&amp;quot;Balogh2023&amp;quot;&amp;gt;&amp;lt;/ref&amp;gt; Such arrays are widely used in glasses-free 3D signage and have been adapted to VR/AR light-field display prototypes.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Lenticular arrays:&amp;#039;&amp;#039;&amp;#039; Arrays of cylindrical microlenses (lenticules) arranged one-dimensionally or two-dimensionally. These produce multiple horizontal viewing zones in glasses-free 3D displays. For example, a lenticular lens array can restrict the exit pupil to certain angles, enabling light-field panels that show different images to each eye.&amp;lt;ref name=&amp;quot;Balogh2023&amp;quot;&amp;gt;&amp;lt;/ref&amp;gt; Such arrays are widely used in glasses-free 3D signage and have been adapted to VR/AR light-field display prototypes.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Holographic optical element (HOE) arrays:&#039;&#039;&#039; These use diffractive hologram patterns that act like an array of lenses. In AR waveguide combiners, &#039;&#039;lens-array holographic optical elements&#039;&#039; have been used to form 2D/3D transparent display screens.&amp;lt;ref name=&quot;Liu2012&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Liu YS, Kuo CY, Hwang CC, et al. Two-dimensional and three-dimensional see-through screen using holographic optical elements. Digital Holography and Three-Dimensional Imaging. 2012;DM2C.6.&lt;/del&gt;&amp;lt;/ref&amp;gt; A HOE can replace a physical lens array by encoding lens behavior into a recorded interference pattern. In one prototype, a &#039;&#039;lens-array HOE&#039;&#039; was created to build a see-through AR screen.&amp;lt;ref name=&quot;Liu2012&quot; /&amp;gt; Other works use holographic micromirror arrays in conjunction with MLAs to couple images into waveguides.&amp;lt;ref name=&quot;Jang2021&quot; /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Holographic optical element (HOE) arrays:&#039;&#039;&#039; These use diffractive hologram patterns that act like an array of lenses. 
In AR waveguide combiners, &#039;&#039;lens-array holographic optical elements&#039;&#039; have been used to form 2D/3D transparent display screens.&amp;lt;ref name=&quot;Liu2012&quot;&amp;gt;&amp;lt;/ref&amp;gt; A HOE can replace a physical lens array by encoding lens behavior into a recorded interference pattern. In one prototype, a &#039;&#039;lens-array HOE&#039;&#039; was created to build a see-through AR screen.&amp;lt;ref name=&quot;Liu2012&quot; /&amp;gt; Other works use holographic micromirror arrays in conjunction with MLAs to couple images into waveguides.&amp;lt;ref name=&quot;Jang2021&quot; /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Liquid crystal / tunable lens arrays:&amp;#039;&amp;#039;&amp;#039; Some arrays use liquid crystal (LC) or fluidic lenses whose optical power can be electronically changed. For example, a chiral (polarization-sensitive) LC lens array was demonstrated in an AR system to steer light and break conventional FOV limits.&amp;lt;ref name=&amp;quot;Wei2023&amp;quot; /&amp;gt; Variable-focus MLAs can allow dynamic focus adjustment or multi-focal displays.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Liquid crystal / tunable lens arrays:&amp;#039;&amp;#039;&amp;#039; Some arrays use liquid crystal (LC) or fluidic lenses whose optical power can be electronically changed. For example, a chiral (polarization-sensitive) LC lens array was demonstrated in an AR system to steer light and break conventional FOV limits.&amp;lt;ref name=&amp;quot;Wei2023&amp;quot; /&amp;gt; Variable-focus MLAs can allow dynamic focus adjustment or multi-focal displays.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;/table&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34996&amp;oldid=prev</id>
		<title>Xinreality at 08:49, 3 May 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34996&amp;oldid=prev"/>
		<updated>2025-05-03T08:49:45Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;table style=&quot;background-color: #fff; color: #202122;&quot; data-mw=&quot;interface&quot;&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;tr class=&quot;diff-title&quot; lang=&quot;en&quot;&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;← Older revision&lt;/td&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;Revision as of 08:49, 3 May 2025&lt;/td&gt;
				&lt;/tr&gt;&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l1&quot;&gt;Line 1:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 1:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;{{see also|Terms|Technical Terms}}&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;{{see also|Terms|Technical Terms}}&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Lens array]]s are two-dimensional arrangements of many small lenses (often [[Microlens arrays]]) that manipulate [[light fields]] for imaging or display. In [[Virtual reality]] (VR) and [[Augmented reality]] (AR) systems, lens arrays serve two broad roles: as &#039;&#039;&#039;display optics&#039;&#039;&#039; that create 3D or light-field images, and as &#039;&#039;&#039;sensor optics&#039;&#039;&#039; that capture directional light for depth and eye tracking. In displays, lens arrays enable multi-view and focal-plane rendering (e.g. light-field displays or integral imaging) by splitting the image into many sub-images corresponding to different angles or depths.&amp;lt;ref name=&quot;Li2019&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&lt;/del&gt;&amp;lt;/ref&amp;gt;&amp;lt;ref name=&quot;Ng2005&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Ng R, Levoy M, Brédif M, et al. Light field photography with a hand-held plenoptic camera. Computer Science Technical Report. 2005;2(11):1-11.&lt;/del&gt;&amp;lt;/ref&amp;gt; In sensing, microlens-based &#039;&#039;plenoptic&#039;&#039; or light-field cameras capture the full 4D light field, allowing computational refocusing and depth estimation.&amp;lt;ref name=&quot;Ng2005&quot; /&amp;gt; &amp;lt;ref name=&quot;Yang2018&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Yang L, Guo Y. Eye tracking using a light field camera on a head-mounted display. US Patent Application US20180173303A1. 
2018 Jun 21.&lt;/del&gt;&amp;lt;/ref&amp;gt; Modern VR/AR prototypes leverage microlens arrays, [[Light field display]] techniques, [[Integral imaging]], holographic waveguide couplers, and specialized lens-array modules for eye tracking and depth sensing. These components appear in devices such as wide-FOV near-eye displays and optical see-through [[Head-mounted display]]s.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Lens array]]s are two-dimensional arrangements of many small lenses (often [[Microlens arrays]]) that manipulate [[light fields]] for imaging or display. In [[Virtual reality]] (VR) and [[Augmented reality]] (AR) systems, lens arrays serve two broad roles: as &#039;&#039;&#039;display optics&#039;&#039;&#039; that create 3D or light-field images, and as &#039;&#039;&#039;sensor optics&#039;&#039;&#039; that capture directional light for depth and eye tracking. In displays, lens arrays enable multi-view and focal-plane rendering (e.g. light-field displays or integral imaging) by splitting the image into many sub-images corresponding to different angles or depths.&amp;lt;ref name=&quot;Li2019&quot;&amp;gt;&amp;lt;/ref&amp;gt;&amp;lt;ref name=&quot;Ng2005&quot;&amp;gt;&amp;lt;/ref&amp;gt; In sensing, microlens-based &#039;&#039;plenoptic&#039;&#039; or light-field cameras capture the full 4D light field, allowing computational refocusing and depth estimation.&amp;lt;ref name=&quot;Ng2005&quot; /&amp;gt; &amp;lt;ref name=&quot;Yang2018&quot;&amp;gt;&amp;lt;/ref&amp;gt; Modern VR/AR prototypes leverage microlens arrays, [[Light field display]] techniques, [[Integral imaging]], holographic waveguide couplers, and specialized lens-array modules for eye tracking and depth sensing. 
These components appear in devices such as wide-FOV near-eye displays and optical see-through [[Head-mounted display]]s.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==History==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==History==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array technology traces back over a century. Gabriel Lippmann first proposed &quot;integral photography&quot; in 1908, capturing 3D scenes via a lens grid.&amp;lt;ref name=&quot;Li2019&quot; /&amp;gt; Early implementations used pinhole arrays (circa 1911) and later simple microlens plates (around 1948) to record and replay light fields.&amp;lt;ref name=&quot;Li2019&quot; /&amp;gt; In the mid-20th century, lenticular (cylindrical lens) sheets became popular for autostereoscopic prints and displays (e.g. 3D postcards and packaging), providing separate views for each eye. By the 2000s, advances in digital displays and microfabrication revived lens-array research for head-worn displays. For example, smartphone-scale integral imaging was demonstrated by pairing a display with a matching MLA.&amp;lt;ref name=&quot;Li2019&quot; /&amp;gt; In recent years, VR/AR research has produced thin, wide-FOV near-eye displays using sophisticated lens arrays (e.g. polarization optics or metasurfaces)&amp;lt;ref name=&quot;Shin2023&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Shin G, Lee Y, Kim J, et al. Field of view and angular-resolution enhancement in microlens array type virtual reality near-eye display using polarization grating. PubMed. 2023;39876217.&lt;/del&gt;&amp;lt;/ref&amp;gt;, as well as compact eye-tracking and depth cameras using microlens arrays.&amp;lt;ref name=&quot;Yang2018&quot; /&amp;gt;&amp;lt;ref name=&quot;Microsoft2020&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Microsoft. Camera comprising lens array. Patent Nweon. 
2020;30768.&lt;/del&gt;&amp;lt;/ref&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array technology traces back over a century. Gabriel Lippmann first proposed &quot;integral photography&quot; in 1908, capturing 3D scenes via a lens grid.&amp;lt;ref name=&quot;Li2019&quot; /&amp;gt; Early implementations used pinhole arrays (circa 1911) and later simple microlens plates (around 1948) to record and replay light fields.&amp;lt;ref name=&quot;Li2019&quot; /&amp;gt; In the mid-20th century, lenticular (cylindrical lens) sheets became popular for autostereoscopic prints and displays (e.g. 3D postcards and packaging), providing separate views for each eye. By the 2000s, advances in digital displays and microfabrication revived lens-array research for head-worn displays. For example, smartphone-scale integral imaging was demonstrated by pairing a display with a matching MLA.&amp;lt;ref name=&quot;Li2019&quot; /&amp;gt; In recent years, VR/AR research has produced thin, wide-FOV near-eye displays using sophisticated lens arrays (e.g. polarization optics or metasurfaces)&amp;lt;ref name=&quot;Shin2023&quot;&amp;gt;&amp;lt;/ref&amp;gt;, as well as compact eye-tracking and depth cameras using microlens arrays.&amp;lt;ref name=&quot;Yang2018&quot; /&amp;gt;&amp;lt;ref name=&quot;Microsoft2020&quot;&amp;gt;&amp;lt;/ref&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== Types of lens arrays ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== Types of lens arrays ==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l9&quot;&gt;Line 9:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 9:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens arrays in VR/AR come in several varieties:&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens arrays in VR/AR come in several varieties:&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Spherical microlens arrays:&#039;&#039;&#039; Regular arrays of small convex (often spherical or aspheric) lenses. These planar MLAs are common for light-field displays and cameras. Pitch (spacing) can range from tens of micrometers (in cameras) up to a few millimeters (in HMD displays).&amp;lt;ref name=&quot;Wei2023&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Wei K. Near-eye augmented reality display using wide field-of-view scanning polarization pupil replication. University of California, Berkeley. 2023.&lt;/del&gt;&amp;lt;/ref&amp;gt;&amp;lt;ref name=&quot;Jang2021&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Jang C, Bang K, Asaduzzaman A, Lee S, Lee B. Three-dimensional see-through augmented-reality display system using a holographic micromirror array. Applied Optics. 2021;60(25):7545-7553.&lt;/del&gt;&amp;lt;/ref&amp;gt; Each lenslet has a focal length chosen to suit the application (e.g. to collimate a display or focus on a sensor).&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Spherical microlens arrays:&#039;&#039;&#039; Regular arrays of small convex (often spherical or aspheric) lenses. These planar MLAs are common for light-field displays and cameras. 
Pitch (spacing) can range from tens of micrometers (in cameras) up to a few millimeters (in HMD displays).&amp;lt;ref name=&quot;Wei2023&quot;&amp;gt;&amp;lt;/ref&amp;gt;&amp;lt;ref name=&quot;Jang2021&quot;&amp;gt;&amp;lt;/ref&amp;gt; Each lenslet has a focal length chosen to suit the application (e.g. to collimate a display or focus on a sensor).&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Lenticular arrays:&#039;&#039;&#039; Arrays of cylindrical microlenses (lenticules) arranged one-dimensionally or two-dimensionally. These produce multiple horizontal viewing zones in glasses-free 3D displays. For example, a lenticular lens array can restrict the exit pupil to certain angles, enabling light-field panels that show different images to each eye.&amp;lt;ref name=&quot;Balogh2023&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;Balogh T, Nagy Z, Kerbel I, et al. Directional and Eye-Tracking Light Field Display with Efficient Rendering and Illumination. PMC. 2023;10385613.&lt;/del&gt;&amp;lt;/ref&amp;gt; Such arrays are widely used in glasses-free 3D signage and have been adapted to VR/AR light-field display prototypes.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Lenticular arrays:&#039;&#039;&#039; Arrays of cylindrical microlenses (lenticules) arranged one-dimensionally or two-dimensionally. These produce multiple horizontal viewing zones in glasses-free 3D displays. 
For example, a lenticular lens array can restrict the exit pupil to certain angles, enabling light-field panels that show different images to each eye.&amp;lt;ref name=&quot;Balogh2023&quot;&amp;gt;&amp;lt;/ref&amp;gt; Such arrays are widely used in glasses-free 3D signage and have been adapted to VR/AR light-field display prototypes.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Holographic optical element (HOE) arrays:&amp;#039;&amp;#039;&amp;#039; These use diffractive hologram patterns that act like an array of lenses. In AR waveguide combiners, &amp;#039;&amp;#039;lens-array holographic optical elements&amp;#039;&amp;#039; have been used to form 2D/3D transparent display screens.&amp;lt;ref name=&amp;quot;Liu2012&amp;quot;&amp;gt;Liu YS, Kuo CY, Hwang CC, et al. Two-dimensional and three-dimensional see-through screen using holographic optical elements. Digital Holography and Three-Dimensional Imaging. 2012;DM2C.6.&amp;lt;/ref&amp;gt; A HOE can replace a physical lens array by encoding lens behavior into a recorded interference pattern. In one prototype, a &amp;#039;&amp;#039;lens-array HOE&amp;#039;&amp;#039; was created to build a see-through AR screen.&amp;lt;ref name=&amp;quot;Liu2012&amp;quot; /&amp;gt; Other works use holographic micromirror arrays in conjunction with MLAs to couple images into waveguides.&amp;lt;ref name=&amp;quot;Jang2021&amp;quot; /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Holographic optical element (HOE) arrays:&amp;#039;&amp;#039;&amp;#039; These use diffractive hologram patterns that act like an array of lenses. 
In AR waveguide combiners, &amp;#039;&amp;#039;lens-array holographic optical elements&amp;#039;&amp;#039; have been used to form 2D/3D transparent display screens.&amp;lt;ref name=&amp;quot;Liu2012&amp;quot;&amp;gt;Liu YS, Kuo CY, Hwang CC, et al. Two-dimensional and three-dimensional see-through screen using holographic optical elements. Digital Holography and Three-Dimensional Imaging. 2012;DM2C.6.&amp;lt;/ref&amp;gt; A HOE can replace a physical lens array by encoding lens behavior into a recorded interference pattern. In one prototype, a &amp;#039;&amp;#039;lens-array HOE&amp;#039;&amp;#039; was created to build a see-through AR screen.&amp;lt;ref name=&amp;quot;Liu2012&amp;quot; /&amp;gt; Other works use holographic micromirror arrays in conjunction with MLAs to couple images into waveguides.&amp;lt;ref name=&amp;quot;Jang2021&amp;quot; /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l76&quot;&gt;Line 76:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 76:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Research on lens-array technology is advancing rapidly. &amp;#039;&amp;#039;&amp;#039;Adaptive optics&amp;#039;&amp;#039;&amp;#039; will likely play a growing role. Arrays of liquid-crystal or shape-changing lenses could allow dynamic focus control and multi-focal displays (reducing vergence-accommodation conflict). Similarly, &amp;#039;&amp;#039;&amp;#039;dynamic wavelength control&amp;#039;&amp;#039;&amp;#039; (e.g. polarization or tunable filters in each lenslet) could enable spatiotemporal multiplexing for color and focus.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Research on lens-array technology is advancing rapidly. &amp;#039;&amp;#039;&amp;#039;Adaptive optics&amp;#039;&amp;#039;&amp;#039; will likely play a growing role. Arrays of liquid-crystal or shape-changing lenses could allow dynamic focus control and multi-focal displays (reducing vergence-accommodation conflict). Similarly, &amp;#039;&amp;#039;&amp;#039;dynamic wavelength control&amp;#039;&amp;#039;&amp;#039; (e.g. polarization or tunable filters in each lenslet) could enable spatiotemporal multiplexing for color and focus.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Metasurfaces and flat optics&#039;&#039;&#039; are a major trend. Recent work has demonstrated &#039;&#039;achromatic metasurface waveguides&#039;&#039; for AR: for example, a 2025 Light:Science &amp;amp;Apps paper introduced inverse-designed metasurface couplers that eliminate chromatic aberration across the full visible spectrum and achieve ~45° FOV.&amp;lt;ref name=&quot;Achromatic2025&quot;&amp;gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;An achromatic metasurface waveguide for augmented reality displays. Light Sci Appl. 2025;41377-025-01761.&lt;/del&gt;&amp;lt;/ref&amp;gt; These metasurface lens arrays are ultrathin and could replace bulky refractive MLAs in future headsets. Cholesteric liquid-crystal metasurface (chiral) lens arrays have already been used to break the field-of-view limit in a scanning AR display.&amp;lt;ref name=&quot;Wei2023&quot; /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Metasurfaces and flat optics&#039;&#039;&#039; are a major trend. 
Recent work has demonstrated &#039;&#039;achromatic metasurface waveguides&#039;&#039; for AR: for example, a 2025 Light: Science &amp;amp; Applications paper introduced inverse-designed metasurface couplers that eliminate chromatic aberration across the full visible spectrum and achieve ~45° FOV.&amp;lt;ref name=&quot;Achromatic2025&quot;&amp;gt;&amp;lt;/ref&amp;gt; These metasurface lens arrays are ultrathin and could replace bulky refractive MLAs in future headsets. Cholesteric liquid-crystal metasurface (chiral) lens arrays have already been used to break the field-of-view limit in a scanning AR display.&amp;lt;ref name=&quot;Wei2023&quot; /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Integration and compute-optics co-design&amp;#039;&amp;#039;&amp;#039; will improve performance. Headsets may co-optimize lens arrays with on-sensor processing. For instance, a microlens array camera could perform onboard refocusing or eye-pose estimation in hardware. Conversely, on the display side, algorithms could pre-distort images to compensate for residual lens aberrations.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Integration and compute-optics co-design&amp;#039;&amp;#039;&amp;#039; will improve performance. Headsets may co-optimize lens arrays with on-sensor processing. For instance, a microlens array camera could perform onboard refocusing or eye-pose estimation in hardware. Conversely, on the display side, algorithms could pre-distort images to compensate for residual lens aberrations.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;/table&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34995&amp;oldid=prev</id>
		<title>Xinreality at 08:47, 3 May 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34995&amp;oldid=prev"/>
		<updated>2025-05-03T08:47:50Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;table style=&quot;background-color: #fff; color: #202122;&quot; data-mw=&quot;interface&quot;&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;tr class=&quot;diff-title&quot; lang=&quot;en&quot;&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;← Older revision&lt;/td&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;Revision as of 08:47, 3 May 2025&lt;/td&gt;
				&lt;/tr&gt;&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l87&quot;&gt;Line 87:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 87:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==References==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==References==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;lt;references /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;lt;references&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Li2019&quot;&amp;gt;Li, X.; Chen, L.; Li, Y.; &amp;lt;i&amp;gt;et al.&amp;lt;/i&amp;gt; “A Broadband Achromatic Metalens Array for Integral Imaging in the Visible.” &#039;&#039;Light: Science &amp;amp;amp; Applications&#039;&#039; &amp;lt;b&amp;gt;8&amp;lt;/b&amp;gt;, 99 (2019). https://doi.org/10.1038/s41377-019-0197-4&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Ng2005&quot;&amp;gt;Ng, R.; Levoy, M.; Brédif, M.; &amp;lt;i&amp;gt;et al.&amp;lt;/i&amp;gt; “Light Field Photography with a Hand‑Held Plenoptic Camera.” Stanford CSTR 2005‑02 (2005). http://graphics.stanford.edu/papers/lfcamera/&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Yang2018&quot;&amp;gt;Yang, L.; Guo, Y. “Eye Tracking Using a Light Field Camera on a Head‑Mounted Display.” US Patent Application &amp;lt;b&amp;gt;US 2018/0173303 A1&amp;lt;/b&amp;gt;, 21 June 2018.&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Shin2023&quot;&amp;gt;Shin, K‑S.; Hong, J.; Han, W.; Park, J‑H. “Field of View and Angular‑Resolution Enhancement in Microlens‑Array‑Type VR Near‑Eye Display Using Polarization Grating.” &#039;&#039;Optics Express&#039;&#039; &amp;lt;b&amp;gt;33&amp;lt;/b&amp;gt;(1): 263‑278 (2025). https://doi.org/10.1364/OE.546812&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Microsoft2020&quot;&amp;gt;Microsoft Technology Licensing LLC. “Camera Comprising Lens Array.” US Patent Application &amp;lt;b&amp;gt;US 2023/0319428 A1&amp;lt;/b&amp;gt;, 5 October 2023.&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Wei2023&quot;&amp;gt;Weng, Y.; Zhang, Y.; Wang, W.; &amp;lt;i&amp;gt;et al.&amp;lt;/i&amp;gt; “High‑Efficiency and Compact Two‑Dimensional Exit Pupil Expansion Design for Diffractive Waveguide Based on Polarization Volume Grating.” &#039;&#039;Optics Express&#039;&#039; &amp;lt;b&amp;gt;31&amp;lt;/b&amp;gt;(4): 6601‑6614 (2023). https://doi.org/10.1364/OE.482447&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Jang2021&quot;&amp;gt;Darkhanbaatar, N.; Erdenebat, M‑U.; Shin, C‑W.; &amp;lt;i&amp;gt;et al.&amp;lt;/i&amp;gt; “Three‑Dimensional See‑Through Augmented‑Reality Display System Using a Holographic Micromirror Array.” &#039;&#039;Applied Optics&#039;&#039; &amp;lt;b&amp;gt;60&amp;lt;/b&amp;gt;(25): 7545‑7551 (2021). https://doi.org/10.1364/AO.428364&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Balogh2023&quot;&amp;gt;Zhang, G.; He, Y.; Liang, H.; &amp;lt;i&amp;gt;et al.&amp;lt;/i&amp;gt; “Directional and Eye‑Tracking Light Field Display with Efficient Rendering and Illumination.” &#039;&#039;Micromachines&#039;&#039; &amp;lt;b&amp;gt;14&amp;lt;/b&amp;gt;(7): 1465 (2023). https://doi.org/10.3390/mi14071465&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Liu2012&quot;&amp;gt;Hong, K.; Hong, J.; Yeom, J.; Lee, B. “Two‑Dimensional and Three‑Dimensional See‑Through Screen Using Holographic Optical Elements.” In &amp;lt;i&amp;gt;Digital Holography and Three‑Dimensional Imaging 2012&amp;lt;/i&amp;gt;, paper DM2C.6. Optical Society of America (2012). https://doi.org/10.1364/DH.2012.DM2C.6&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;ref name=&quot;Achromatic2025&quot;&amp;gt;Tian, Z.; Zhu, X.; Surman, P.; &amp;lt;i&amp;gt;et al.&amp;lt;/i&amp;gt; “An Achromatic Metasurface Waveguide for Augmented Reality Displays.” &#039;&#039;Light: Science &amp;amp;amp; Applications&#039;&#039; &amp;lt;b&amp;gt;14&amp;lt;/b&amp;gt;, 94 (2025). https://doi.org/10.1038/s41377-025-01761-w&amp;lt;/ref&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;&lt;/ins&gt;/&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;references&lt;/ins&gt;&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt; &lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Terms]]&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Terms]]&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;/table&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34717&amp;oldid=prev</id>
		<title>Xinreality at 03:16, 30 April 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34717&amp;oldid=prev"/>
		<updated>2025-04-30T03:16:04Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;table style=&quot;background-color: #fff; color: #202122;&quot; data-mw=&quot;interface&quot;&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;tr class=&quot;diff-title&quot; lang=&quot;en&quot;&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;← Older revision&lt;/td&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;Revision as of 03:16, 30 April 2025&lt;/td&gt;
				&lt;/tr&gt;&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l1&quot;&gt;Line 1:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 1:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;{{see also|Terms|Technical Terms}}&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;{{see also|Terms|Technical Terms}}&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens &lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;arrays &lt;/del&gt;are two-dimensional arrangements of many small lenses (often [[Microlens &lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;array&lt;/del&gt;]]&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;s&lt;/del&gt;) that manipulate light fields for imaging or display. In [[Virtual reality]] (VR) and [[Augmented reality]] (AR) systems, lens arrays serve two broad roles: as &#039;&#039;&#039;display optics&#039;&#039;&#039; that create 3D or light-field images, and as &#039;&#039;&#039;sensor optics&#039;&#039;&#039; that capture directional light for depth and eye tracking. In displays, lens arrays enable multi-view and focal-plane rendering (e.g. light-field displays or integral imaging) by splitting the image into many sub-images corresponding to different angles or depths.&amp;lt;ref name=&quot;Li2019&quot;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;/ref&amp;gt;&amp;lt;ref name=&quot;Ng2005&quot;&amp;gt;Ng R, Levoy M, Brédif M, et al. Light field photography with a hand-held plenoptic camera. Computer Science Technical Report. 2005;2(11):1-11.&amp;lt;/ref&amp;gt; In sensing, microlens-based &#039;&#039;plenoptic&#039;&#039; or light-field cameras capture the full 4D light field, allowing computational refocusing and depth estimation.&amp;lt;ref name=&quot;Ng2005&quot; /&amp;gt; &amp;lt;ref name=&quot;Yang2018&quot;&amp;gt;Yang L, Guo Y. Eye tracking using a light field camera on a head-mounted display. US Patent Application US20180173303A1. 
2018 Jun 21.&amp;lt;/ref&amp;gt; Modern VR/AR prototypes leverage microlens arrays, [[Light field display]] techniques, [[Integral imaging]], holographic waveguide couplers, and specialized lens-array modules for eye tracking and depth sensing. These components appear in devices such as wide-FOV near-eye displays and optical see-through [[Head-mounted display]]s.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;[[&lt;/ins&gt;Lens &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;array]]s &lt;/ins&gt;are two-dimensional arrangements of many small lenses (often [[Microlens &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;arrays&lt;/ins&gt;]]) that manipulate &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;[[&lt;/ins&gt;light fields&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;]] &lt;/ins&gt;for imaging or display. In [[Virtual reality]] (VR) and [[Augmented reality]] (AR) systems, lens arrays serve two broad roles: as &#039;&#039;&#039;display optics&#039;&#039;&#039; that create 3D or light-field images, and as &#039;&#039;&#039;sensor optics&#039;&#039;&#039; that capture directional light for depth and eye tracking. In displays, lens arrays enable multi-view and focal-plane rendering (e.g. light-field displays or integral imaging) by splitting the image into many sub-images corresponding to different angles or depths.&amp;lt;ref name=&quot;Li2019&quot;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 
2019;8:99.&amp;lt;/ref&amp;gt;&amp;lt;ref name=&quot;Ng2005&quot;&amp;gt;Ng R, Levoy M, Brédif M, et al. Light field photography with a hand-held plenoptic camera. Computer Science Technical Report. 2005;2(11):1-11.&amp;lt;/ref&amp;gt; In sensing, microlens-based &#039;&#039;plenoptic&#039;&#039; or light-field cameras capture the full 4D light field, allowing computational refocusing and depth estimation.&amp;lt;ref name=&quot;Ng2005&quot; /&amp;gt; &amp;lt;ref name=&quot;Yang2018&quot;&amp;gt;Yang L, Guo Y. Eye tracking using a light field camera on a head-mounted display. US Patent Application US20180173303A1. 2018 Jun 21.&amp;lt;/ref&amp;gt; Modern VR/AR prototypes leverage microlens arrays, [[Light field display]] techniques, [[Integral imaging]], holographic waveguide couplers, and specialized lens-array modules for eye tracking and depth sensing. These components appear in devices such as wide-FOV near-eye displays and optical see-through [[Head-mounted display]]s.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==History==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==History==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;/table&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34716&amp;oldid=prev</id>
		<title>Xinreality at 03:15, 30 April 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34716&amp;oldid=prev"/>
		<updated>2025-04-30T03:15:38Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;table style=&quot;background-color: #fff; color: #202122;&quot; data-mw=&quot;interface&quot;&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;tr class=&quot;diff-title&quot; lang=&quot;en&quot;&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;← Older revision&lt;/td&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;Revision as of 03:15, 30 April 2025&lt;/td&gt;
				&lt;/tr&gt;&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l2&quot;&gt;Line 2:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 2:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens arrays are two-dimensional arrangements of many small lenses (often [[Microlens array]]s) that manipulate light fields for imaging or display. In [[Virtual reality]] (VR) and [[Augmented reality]] (AR) systems, lens arrays serve two broad roles: as &amp;#039;&amp;#039;&amp;#039;display optics&amp;#039;&amp;#039;&amp;#039; that create 3D or light-field images, and as &amp;#039;&amp;#039;&amp;#039;sensor optics&amp;#039;&amp;#039;&amp;#039; that capture directional light for depth and eye tracking. In displays, lens arrays enable multi-view and focal-plane rendering (e.g. light-field displays or integral imaging) by splitting the image into many sub-images corresponding to different angles or depths.&amp;lt;ref name=&amp;quot;Li2019&amp;quot;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;/ref&amp;gt;&amp;lt;ref name=&amp;quot;Ng2005&amp;quot;&amp;gt;Ng R, Levoy M, Brédif M, et al. Light field photography with a hand-held plenoptic camera. Computer Science Technical Report. 2005;2(11):1-11.&amp;lt;/ref&amp;gt; In sensing, microlens-based &amp;#039;&amp;#039;plenoptic&amp;#039;&amp;#039; or light-field cameras capture the full 4D light field, allowing computational refocusing and depth estimation.&amp;lt;ref name=&amp;quot;Ng2005&amp;quot; /&amp;gt; &amp;lt;ref name=&amp;quot;Yang2018&amp;quot;&amp;gt;Yang L, Guo Y. Eye tracking using a light field camera on a head-mounted display. US Patent Application US20180173303A1. 2018 Jun 21.&amp;lt;/ref&amp;gt; Modern VR/AR prototypes leverage microlens arrays, [[Light field display]] techniques, [[Integral imaging]], holographic waveguide couplers, and specialized lens-array modules for eye tracking and depth sensing. These components appear in devices such as wide-FOV near-eye displays and optical see-through [[Head-mounted display]]s.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens arrays are two-dimensional arrangements of many small lenses (often [[Microlens array]]s) that manipulate light fields for imaging or display. In [[Virtual reality]] (VR) and [[Augmented reality]] (AR) systems, lens arrays serve two broad roles: as &amp;#039;&amp;#039;&amp;#039;display optics&amp;#039;&amp;#039;&amp;#039; that create 3D or light-field images, and as &amp;#039;&amp;#039;&amp;#039;sensor optics&amp;#039;&amp;#039;&amp;#039; that capture directional light for depth and eye tracking. In displays, lens arrays enable multi-view and focal-plane rendering (e.g. light-field displays or integral imaging) by splitting the image into many sub-images corresponding to different angles or depths.&amp;lt;ref name=&amp;quot;Li2019&amp;quot;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;/ref&amp;gt;&amp;lt;ref name=&amp;quot;Ng2005&amp;quot;&amp;gt;Ng R, Levoy M, Brédif M, et al. Light field photography with a hand-held plenoptic camera. Computer Science Technical Report. 2005;2(11):1-11.&amp;lt;/ref&amp;gt; In sensing, microlens-based &amp;#039;&amp;#039;plenoptic&amp;#039;&amp;#039; or light-field cameras capture the full 4D light field, allowing computational refocusing and depth estimation.&amp;lt;ref name=&amp;quot;Ng2005&amp;quot; /&amp;gt; &amp;lt;ref name=&amp;quot;Yang2018&amp;quot;&amp;gt;Yang L, Guo Y. Eye tracking using a light field camera on a head-mounted display. US Patent Application US20180173303A1. 2018 Jun 21.&amp;lt;/ref&amp;gt; Modern VR/AR prototypes leverage microlens arrays, [[Light field display]] techniques, [[Integral imaging]], holographic waveguide couplers, and specialized lens-array modules for eye tracking and depth sensing. These components appear in devices such as wide-FOV near-eye displays and optical see-through [[Head-mounted display]]s.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==History =&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==History&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;=&lt;/ins&gt;=&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array technology traces back over a century. Gabriel Lippmann first proposed &amp;quot;integral photography&amp;quot; in 1908, capturing 3D scenes via a lens grid.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; Early implementations used pinhole arrays (circa 1911) and later simple microlens plates (around 1948) to record and replay light fields.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; In the mid-20th century, lenticular (cylindrical lens) sheets became popular for autostereoscopic prints and displays (e.g. 3D postcards and packaging), providing separate views for each eye. By the 2000s, advances in digital displays and microfabrication revived lens-array research for head-worn displays. For example, smartphone-scale integral imaging was demonstrated by pairing a display with a matching MLA.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; In recent years, VR/AR research has produced thin, wide-FOV near-eye displays using sophisticated lens arrays (e.g. polarization optics or metasurfaces)&amp;lt;ref name=&amp;quot;Shin2023&amp;quot;&amp;gt;Shin G, Lee Y, Kim J, et al. Field of view and angular-resolution enhancement in microlens array type virtual reality near-eye display using polarization grating. PubMed. 2023;39876217.&amp;lt;/ref&amp;gt;, as well as compact eye-tracking and depth cameras using microlens arrays.&amp;lt;ref name=&amp;quot;Yang2018&amp;quot; /&amp;gt;&amp;lt;ref name=&amp;quot;Microsoft2020&amp;quot;&amp;gt;Microsoft. Camera comprising lens array. Patent Nweon. 2020;30768.&amp;lt;/ref&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array technology traces back over a century. Gabriel Lippmann first proposed &amp;quot;integral photography&amp;quot; in 1908, capturing 3D scenes via a lens grid.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; Early implementations used pinhole arrays (circa 1911) and later simple microlens plates (around 1948) to record and replay light fields.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; In the mid-20th century, lenticular (cylindrical lens) sheets became popular for autostereoscopic prints and displays (e.g. 3D postcards and packaging), providing separate views for each eye. By the 2000s, advances in digital displays and microfabrication revived lens-array research for head-worn displays. For example, smartphone-scale integral imaging was demonstrated by pairing a display with a matching MLA.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; In recent years, VR/AR research has produced thin, wide-FOV near-eye displays using sophisticated lens arrays (e.g. polarization optics or metasurfaces)&amp;lt;ref name=&amp;quot;Shin2023&amp;quot;&amp;gt;Shin G, Lee Y, Kim J, et al. Field of view and angular-resolution enhancement in microlens array type virtual reality near-eye display using polarization grating. PubMed. 2023;39876217.&amp;lt;/ref&amp;gt;, as well as compact eye-tracking and depth cameras using microlens arrays.&amp;lt;ref name=&amp;quot;Yang2018&amp;quot; /&amp;gt;&amp;lt;ref name=&amp;quot;Microsoft2020&amp;quot;&amp;gt;Microsoft. Camera comprising lens array. Patent Nweon. 2020;30768.&amp;lt;/ref&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;/table&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34715&amp;oldid=prev</id>
		<title>Xinreality at 03:15, 30 April 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34715&amp;oldid=prev"/>
		<updated>2025-04-30T03:15:26Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;table style=&quot;background-color: #fff; color: #202122;&quot; data-mw=&quot;interface&quot;&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;tr class=&quot;diff-title&quot; lang=&quot;en&quot;&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;← Older revision&lt;/td&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;Revision as of 03:15, 30 April 2025&lt;/td&gt;
				&lt;/tr&gt;&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l1&quot;&gt;Line 1:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 1:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;= Lens array in virtual and augmented reality =&lt;/del&gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;{{see also|Terms|Technical Terms}}&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt; &lt;/div&gt;&lt;/td&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-added&quot;&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens arrays are two-dimensional arrangements of many small lenses (often [[Microlens array]]s) that manipulate light fields for imaging or display. In [[Virtual reality]] (VR) and [[Augmented reality]] (AR) systems, lens arrays serve two broad roles: as &amp;#039;&amp;#039;&amp;#039;display optics&amp;#039;&amp;#039;&amp;#039; that create 3D or light-field images, and as &amp;#039;&amp;#039;&amp;#039;sensor optics&amp;#039;&amp;#039;&amp;#039; that capture directional light for depth and eye tracking. In displays, lens arrays enable multi-view and focal-plane rendering (e.g. light-field displays or integral imaging) by splitting the image into many sub-images corresponding to different angles or depths.&amp;lt;ref name=&amp;quot;Li2019&amp;quot;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;/ref&amp;gt;&amp;lt;ref name=&amp;quot;Ng2005&amp;quot;&amp;gt;Ng R, Levoy M, Brédif M, et al. Light field photography with a hand-held plenoptic camera. Computer Science Technical Report. 2005;2(11):1-11.&amp;lt;/ref&amp;gt; In sensing, microlens-based &amp;#039;&amp;#039;plenoptic&amp;#039;&amp;#039; or light-field cameras capture the full 4D light field, allowing computational refocusing and depth estimation.&amp;lt;ref name=&amp;quot;Ng2005&amp;quot; /&amp;gt; &amp;lt;ref name=&amp;quot;Yang2018&amp;quot;&amp;gt;Yang L, Guo Y. Eye tracking using a light field camera on a head-mounted display. US Patent Application US20180173303A1. 2018 Jun 21.&amp;lt;/ref&amp;gt; Modern VR/AR prototypes leverage microlens arrays, [[Light field display]] techniques, [[Integral imaging]], holographic waveguide couplers, and specialized lens-array modules for eye tracking and depth sensing. These components appear in devices such as wide-FOV near-eye displays and optical see-through [[Head-mounted display]]s.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens arrays are two-dimensional arrangements of many small lenses (often [[Microlens array]]s) that manipulate light fields for imaging or display. In [[Virtual reality]] (VR) and [[Augmented reality]] (AR) systems, lens arrays serve two broad roles: as &amp;#039;&amp;#039;&amp;#039;display optics&amp;#039;&amp;#039;&amp;#039; that create 3D or light-field images, and as &amp;#039;&amp;#039;&amp;#039;sensor optics&amp;#039;&amp;#039;&amp;#039; that capture directional light for depth and eye tracking. In displays, lens arrays enable multi-view and focal-plane rendering (e.g. light-field displays or integral imaging) by splitting the image into many sub-images corresponding to different angles or depths.&amp;lt;ref name=&amp;quot;Li2019&amp;quot;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;/ref&amp;gt;&amp;lt;ref name=&amp;quot;Ng2005&amp;quot;&amp;gt;Ng R, Levoy M, Brédif M, et al. Light field photography with a hand-held plenoptic camera. Computer Science Technical Report. 2005;2(11):1-11.&amp;lt;/ref&amp;gt; In sensing, microlens-based &amp;#039;&amp;#039;plenoptic&amp;#039;&amp;#039; or light-field cameras capture the full 4D light field, allowing computational refocusing and depth estimation.&amp;lt;ref name=&amp;quot;Ng2005&amp;quot; /&amp;gt; &amp;lt;ref name=&amp;quot;Yang2018&amp;quot;&amp;gt;Yang L, Guo Y. Eye tracking using a light field camera on a head-mounted display. US Patent Application US20180173303A1. 2018 Jun 21.&amp;lt;/ref&amp;gt; Modern VR/AR prototypes leverage microlens arrays, [[Light field display]] techniques, [[Integral imaging]], holographic waveguide couplers, and specialized lens-array modules for eye tracking and depth sensing. These components appear in devices such as wide-FOV near-eye displays and optical see-through [[Head-mounted display]]s.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== History =&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;=&lt;/del&gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==History =&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt; &lt;/div&gt;&lt;/td&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-added&quot;&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array technology traces back over a century. Gabriel Lippmann first proposed &amp;quot;integral photography&amp;quot; in 1908, capturing 3D scenes via a lens grid.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; Early implementations used pinhole arrays (circa 1911) and later simple microlens plates (around 1948) to record and replay light fields.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; In the mid-20th century, lenticular (cylindrical lens) sheets became popular for autostereoscopic prints and displays (e.g. 3D postcards and packaging), providing separate views for each eye. By the 2000s, advances in digital displays and microfabrication revived lens-array research for head-worn displays. For example, smartphone-scale integral imaging was demonstrated by pairing a display with a matching MLA.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; In recent years, VR/AR research has produced thin, wide-FOV near-eye displays using sophisticated lens arrays (e.g. polarization optics or metasurfaces)&amp;lt;ref name=&amp;quot;Shin2023&amp;quot;&amp;gt;Shin G, Lee Y, Kim J, et al. Field of view and angular-resolution enhancement in microlens array type virtual reality near-eye display using polarization grating. PubMed. 2023;39876217.&amp;lt;/ref&amp;gt;, as well as compact eye-tracking and depth cameras using microlens arrays.&amp;lt;ref name=&amp;quot;Yang2018&amp;quot; /&amp;gt;&amp;lt;ref name=&amp;quot;Microsoft2020&amp;quot;&amp;gt;Microsoft. Camera comprising lens array. Patent Nweon. 2020;30768.&amp;lt;/ref&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array technology traces back over a century. Gabriel Lippmann first proposed &amp;quot;integral photography&amp;quot; in 1908, capturing 3D scenes via a lens grid.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; Early implementations used pinhole arrays (circa 1911) and later simple microlens plates (around 1948) to record and replay light fields.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; In the mid-20th century, lenticular (cylindrical lens) sheets became popular for autostereoscopic prints and displays (e.g. 3D postcards and packaging), providing separate views for each eye. By the 2000s, advances in digital displays and microfabrication revived lens-array research for head-worn displays. For example, smartphone-scale integral imaging was demonstrated by pairing a display with a matching MLA.&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt; In recent years, VR/AR research has produced thin, wide-FOV near-eye displays using sophisticated lens arrays (e.g. polarization optics or metasurfaces)&amp;lt;ref name=&amp;quot;Shin2023&amp;quot;&amp;gt;Shin G, Lee Y, Kim J, et al. Field of view and angular-resolution enhancement in microlens array type virtual reality near-eye display using polarization grating. PubMed. 2023;39876217.&amp;lt;/ref&amp;gt;, as well as compact eye-tracking and depth cameras using microlens arrays.&amp;lt;ref name=&amp;quot;Yang2018&amp;quot; /&amp;gt;&amp;lt;ref name=&amp;quot;Microsoft2020&amp;quot;&amp;gt;Microsoft. Camera comprising lens array. Patent Nweon. 2020;30768.&amp;lt;/ref&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l35&quot;&gt;Line 35:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 33:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Focal cueing and depth enhancement:&amp;#039;&amp;#039;&amp;#039; Some VR/AR displays incorporate multiple lens arrays for depth/focus manipulation. For instance, a light-field HMD may use two stacked MLA arrays (a so-called dual-focal arrangement) to enlarge the depth range so that virtual objects at different distances can appear simultaneously in focus. Polarization or liquid crystal arrays have been used to switch between focus planes. These advanced architectures aim to overcome the vergence-accommodation mismatch by aligning virtual image focus with convergence.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Focal cueing and depth enhancement:&amp;#039;&amp;#039;&amp;#039; Some VR/AR displays incorporate multiple lens arrays for depth/focus manipulation. For instance, a light-field HMD may use two stacked MLA arrays (a so-called dual-focal arrangement) to enlarge the depth range so that virtual objects at different distances can appear simultaneously in focus. Polarization or liquid crystal arrays have been used to switch between focus planes. These advanced architectures aim to overcome the vergence-accommodation mismatch by aligning virtual image focus with convergence.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== Applications in sensing ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==Applications in sensing==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt; &lt;/div&gt;&lt;/td&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-added&quot;&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Light-field (plenoptic) cameras for depth and eye tracking:&amp;#039;&amp;#039;&amp;#039; Lens arrays are fundamental to plenoptic imaging. Placing an MLA a focal distance in front of an image sensor allows each micro-image to capture rays from different angles.&amp;lt;ref name=&amp;quot;Ng2005&amp;quot; /&amp;gt; This effectively samples the full 4D light field of the scene. With computational processing, one can refocus the image after capture or compute depth maps from parallax between the micro-images.&amp;lt;ref name=&amp;quot;Ng2005&amp;quot; /&amp;gt; In VR/AR, this is useful both for external depth sensing (scene reconstruction) and internal eye imaging. For example, patents describe using a light-field camera (with MLA) inside an HMD to capture the user&amp;#039;s eye. The captured plenoptic data lets the system digitally refocus on various eye regions and compute gaze direction without needing precise IR glints.&amp;lt;ref name=&amp;quot;Yang2018&amp;quot; /&amp;gt; This relaxes the geometric constraints on eye-tracker placement. Thus, microlens-based light-field cameras can support both environmental mapping and fine eye tracking in headsets.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Light-field (plenoptic) cameras for depth and eye tracking:&amp;#039;&amp;#039;&amp;#039; Lens arrays are fundamental to plenoptic imaging. Placing an MLA a focal distance in front of an image sensor allows each micro-image to capture rays from different angles.&amp;lt;ref name=&amp;quot;Ng2005&amp;quot; /&amp;gt; This effectively samples the full 4D light field of the scene. With computational processing, one can refocus the image after capture or compute depth maps from parallax between the micro-images.&amp;lt;ref name=&amp;quot;Ng2005&amp;quot; /&amp;gt; In VR/AR, this is useful both for external depth sensing (scene reconstruction) and internal eye imaging. For example, patents describe using a light-field camera (with MLA) inside an HMD to capture the user&amp;#039;s eye. The captured plenoptic data lets the system digitally refocus on various eye regions and compute gaze direction without needing precise IR glints.&amp;lt;ref name=&amp;quot;Yang2018&amp;quot; /&amp;gt; This relaxes the geometric constraints on eye-tracker placement. Thus, microlens-based light-field cameras can support both environmental mapping and fine eye tracking in headsets.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l43&quot;&gt;Line 43:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 40:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Depth sensors:&amp;#039;&amp;#039;&amp;#039; Outside of full light-field cameras, some depth-sensing concepts also use microlenses. One approach is a multi-aperture structured-light projector: an array of tiny beams (formed by a lens array) projects a coded IR pattern for depth triangulation. Another is embedding micro-lenses over depth-sensing pixels to increase fill factor or directivity. In practice, however, most time-of-flight and stereo cameras in VR/AR do not use discrete lens arrays (they use single large lenses or laser projectors). The main use of lens arrays in sensing is thus in light-field capture (including gaze capture) rather than typical ToF or stereo modules.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Depth sensors:&amp;#039;&amp;#039;&amp;#039; Outside of full light-field cameras, some depth-sensing concepts also use microlenses. One approach is a multi-aperture structured-light projector: an array of tiny beams (formed by a lens array) projects a coded IR pattern for depth triangulation. Another is embedding micro-lenses over depth-sensing pixels to increase fill factor or directivity. In practice, however, most time-of-flight and stereo cameras in VR/AR do not use discrete lens arrays (they use single large lenses or laser projectors). The main use of lens arrays in sensing is thus in light-field capture (including gaze capture) rather than typical ToF or stereo modules.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== Technical specifications ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==Technical specifications==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt; &lt;/div&gt;&lt;/td&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-added&quot;&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array designs involve several key parameters:&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array designs involve several key parameters:&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l61&quot;&gt;Line 61:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 57:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Overall, the technical design of a lens array involves a trade-off between FOV, resolution, brightness, and physical thickness. Emerging approaches like metalens arrays promise thinner optics with engineered dispersion&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt;, which may shift these trade-offs in future systems.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Overall, the technical design of a lens array involves a trade-off between FOV, resolution, brightness, and physical thickness. Emerging approaches like metalens arrays promise thinner optics with engineered dispersion&amp;lt;ref name=&amp;quot;Li2019&amp;quot; /&amp;gt;, which may shift these trade-offs in future systems.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== Challenges ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==Challenges==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt; &lt;/div&gt;&lt;/td&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-added&quot;&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array components face several challenges in VR/AR:&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array components face several challenges in VR/AR:&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l77&quot;&gt;Line 77:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 72:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Manufacturing scale and cost:&amp;#039;&amp;#039;&amp;#039; Large, high-quality MLAs (especially with small lenslets) are challenging to produce over large areas. Holographic and metasurface arrays often require cleanroom fabrication. For consumer VR/AR, cost-effective replication (e.g. using nanoimprint or injection molding) is crucial but may not yet match the performance of lab prototypes.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Manufacturing scale and cost:&amp;#039;&amp;#039;&amp;#039; Large, high-quality MLAs (especially with small lenslets) are challenging to produce over large areas. Holographic and metasurface arrays often require cleanroom fabrication. For consumer VR/AR, cost-effective replication (e.g. using nanoimprint or injection molding) is crucial but may not yet match the performance of lab prototypes.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== Future developments ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==Future developments==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Research on lens-array technology is advancing rapidly. &amp;#039;&amp;#039;&amp;#039;Adaptive optics&amp;#039;&amp;#039;&amp;#039; will likely play a growing role. Arrays of liquid-crystal or shape-changing lenses could allow dynamic focus control and multi-focal displays (reducing vergence-accommodation conflict). Similarly, &amp;#039;&amp;#039;&amp;#039;dynamic wavelength control&amp;#039;&amp;#039;&amp;#039; (e.g. polarization or tunable filters in each lenslet) could enable spatiotemporal multiplexing for color and focus.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Research on lens-array technology is advancing rapidly. &amp;#039;&amp;#039;&amp;#039;Adaptive optics&amp;#039;&amp;#039;&amp;#039; will likely play a growing role. Arrays of liquid-crystal or shape-changing lenses could allow dynamic focus control and multi-focal displays (reducing vergence-accommodation conflict). Similarly, &amp;#039;&amp;#039;&amp;#039;dynamic wavelength control&amp;#039;&amp;#039;&amp;#039; (e.g. polarization or tunable filters in each lenslet) could enable spatiotemporal multiplexing for color and focus.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l91&quot;&gt;Line 91:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 86:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;As VR/AR systems aim for wider FOV, thinner form factors, and better realism, custom lens-array designs will continue to evolve. Each new generation of headsets (for example, employing pancake optics, multi-zone optics, or holographic waveguides) tends to reinvigorate lens-array innovation. In sum, lens arrays remain a key enabling technology for immersive displays and interactive sensing, with ongoing research focusing on mitigating their limitations and leveraging novel materials and computation.&amp;lt;ref name=&amp;quot;Achromatic2025&amp;quot; /&amp;gt;&amp;lt;ref name=&amp;quot;Wei2023&amp;quot; /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;As VR/AR systems aim for wider FOV, thinner form factors, and better realism, custom lens-array designs will continue to evolve. Each new generation of headsets (for example, employing pancake optics, multi-zone optics, or holographic waveguides) tends to reinvigorate lens-array innovation. In sum, lens arrays remain a key enabling technology for immersive displays and interactive sensing, with ongoing research focusing on mitigating their limitations and leveraging novel materials and computation.&amp;lt;ref name=&amp;quot;Achromatic2025&amp;quot; /&amp;gt;&amp;lt;ref name=&amp;quot;Wei2023&amp;quot; /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== References ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;==References==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;lt;references /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;lt;references /&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;[[Category:Terms]]&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;[[Category:Technical Terms]]&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Virtual reality]]&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Virtual reality]]&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Augmented reality]]&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Augmented reality]]&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Optical devices]]&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Optical devices]]&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Display technology]]&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Display technology]]&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;/table&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34714&amp;oldid=prev</id>
		<title>Xinreality at 03:09, 30 April 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34714&amp;oldid=prev"/>
		<updated>2025-04-30T03:09:23Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;table style=&quot;background-color: #fff; color: #202122;&quot; data-mw=&quot;interface&quot;&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;tr class=&quot;diff-title&quot; lang=&quot;en&quot;&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;← Older revision&lt;/td&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;Revision as of 03:09, 30 April 2025&lt;/td&gt;
				&lt;/tr&gt;&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l92&quot;&gt;Line 92:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 92:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== References ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== References ==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;{{reflist}}&lt;/del&gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;lt;references /&amp;gt;&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Virtual reality]]&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;[[Category:Virtual reality]]&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;/table&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34713&amp;oldid=prev</id>
		<title>Xinreality at 02:51, 30 April 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34713&amp;oldid=prev"/>
		<updated>2025-04-30T02:51:27Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;table style=&quot;background-color: #fff; color: #202122;&quot; data-mw=&quot;interface&quot;&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;col class=&quot;diff-marker&quot; /&gt;
				&lt;col class=&quot;diff-content&quot; /&gt;
				&lt;tr class=&quot;diff-title&quot; lang=&quot;en&quot;&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;← Older revision&lt;/td&gt;
				&lt;td colspan=&quot;2&quot; style=&quot;background-color: #fff; color: #202122; text-align: center;&quot;&gt;Revision as of 02:51, 30 April 2025&lt;/td&gt;
				&lt;/tr&gt;&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l47&quot;&gt;Line 47:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 47:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array designs involve several key parameters:&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array designs involve several key parameters:&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Lens pitch (size):&#039;&#039;&#039; The center-to-center spacing of the lenslets. Near-eye display lens pitches are often on the order of 0.5–3 mm. For example, a wide-FOV scanning AR prototype used a &quot;chiral&quot; LC lens array of 8×15 lenses with 2 mm pitch.&amp;lt;ref name&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Lens pitch (size):&#039;&#039;&#039; The center-to-center spacing of the lenslets. Near-eye display lens pitches are often on the order of 0.5–3 mm. For example, a wide-FOV scanning AR prototype used a &quot;chiral&quot; LC lens array of 8×15 lenses with 2 mm pitch.&amp;lt;ref name&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;=&quot;Wei2023&quot; /&amp;gt; An AR waveguide coupler in another system used spherical lenslets with 1 mm pitch.&amp;lt;ref name=&quot;Jang2021&quot; /&amp;gt; Plenoptic camera MLAs, by contrast, have much finer pitch (tens to hundreds of µm) to densely sample the image plane. Pitch determines the tradeoff between image resolution and angular coverage: smaller pitch yields higher angular resolution (more sub-aperture views) but collects less light per lens.&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt; &lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-side-deleted&quot;&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&#039;&#039;&#039;Focal length and f-number:&#039;&#039;&#039; Each lenslet&#039;s focal length sets the viewing frustum of that micro-aperture. Low f-number (wide aperture) means a large view angle per lens, which broadens the overall FOV of the system. In the scanning waveguide example, the 2 mm lenslets had an f-number of about 0.41 at 639 nm.&amp;lt;ref name=&quot;Wei2023&quot; /&amp;gt; In designs, the focal length is often chosen to collimate or focus light from the display panel to the eye (in displays) or from the scene to the sensor (in cameras). Mismatches in focal length across the array can create blurring or depth errors.&lt;/ins&gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Aperture shape and fill factor:&amp;#039;&amp;#039;&amp;#039; Lenslets may be round or hexagonal. Hexagonal or honeycomb layouts can achieve near-100% fill factor (no dead zones) which maximizes brightness. Fill-factor and uniformity are critical: any gap between lenses can cause vignetting or loss of resolution. In fabrication, arrays are usually molded or imprinted in photoresist, and then replicated in glass or plastic.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Aperture shape and fill factor:&amp;#039;&amp;#039;&amp;#039; Lenslets may be round or hexagonal. Hexagonal or honeycomb layouts can achieve near-100% fill factor (no dead zones) which maximizes brightness. Fill-factor and uniformity are critical: any gap between lenses can cause vignetting or loss of resolution. In fabrication, arrays are usually molded or imprinted in photoresist, and then replicated in glass or plastic.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Resolution and eye-box:&#039;&#039;&#039; The number of lenses across an HMD display determines how many views can be presented. Each lens typically covers a few hundred display pixels. Alignment is crucial: each sub-image must align to the user&#039;s eye position. Systems often include pupil steering (moving images to follow the eye) to maintain the eye-box. In the aforementioned scanning AR system&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Wei K. Near-eye augmented reality display using wide field-of-view scanning polarization pupil replication. University of California, Berkeley. 2023.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt;, the wide-FOV was achieved by a large lens array, but the resulting resolution per view was low because the 2 mm pitch limited how many sub-images could be rendered.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Resolution and eye-box:&#039;&#039;&#039; The number of lenses across an HMD display determines how many views can be presented. Each lens typically covers a few hundred display pixels. Alignment is crucial: each sub-image must align to the user&#039;s eye position. Systems often include pupil steering (moving images to follow the eye) to maintain the eye-box. 
In the aforementioned scanning AR system&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Wei2023&quot; &lt;/ins&gt;/&amp;gt;, the wide-FOV was achieved by a large lens array, but the resulting resolution per view was low because the 2 mm pitch limited how many sub-images could be rendered.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Chromatic and optical aberrations:&#039;&#039;&#039; Simple refractive lenslets suffer from chromatic dispersion (different focal lengths per wavelength). As noted in integral imaging, chromatic aberration in MLAs &quot;reduces viewing quality&quot;.&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt; This is especially problematic for full-color displays. Achromatic doublet designs or advanced metalens lenses can correct this, but add complexity. Spherical aberration and field curvature within each lenslet also degrade sharpness if not carefully managed.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Chromatic and optical aberrations:&#039;&#039;&#039; Simple refractive lenslets suffer from chromatic dispersion (different focal lengths per wavelength). As noted in integral imaging, chromatic aberration in MLAs &quot;reduces viewing quality&quot;.&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Li2019&quot; &lt;/ins&gt;/&amp;gt; This is especially problematic for full-color displays. 
Achromatic doublet designs or advanced metalens lenses can correct this, but add complexity. Spherical aberration and field curvature within each lenslet also degrade sharpness if not carefully managed.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Materials and manufacturing:&#039;&#039;&#039; Lens arrays are typically made in glass, plastic or polymer (e.g. PMMA, silicone) for refractive types. Holographic HOEs are recorded in photopolymers (e.g. Bayfol HX). Metasurface MLAs use high-index nanostructures (e.g. TiO₂) on a substrate.&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt; Manufacturing tolerances (surface roughness, lens height accuracy) critically affect performance. For example, a 1 µm error in a microlens height could shift focus by hundreds of micrometers.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Materials and manufacturing:&#039;&#039;&#039; Lens arrays are typically made in glass, plastic or polymer (e.g. PMMA, silicone) for refractive types. Holographic HOEs are recorded in photopolymers (e.g. Bayfol HX). Metasurface MLAs use high-index nanostructures (e.g. TiO₂) on a substrate.&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Li2019&quot; &lt;/ins&gt;/&amp;gt; Manufacturing tolerances (surface roughness, lens height accuracy) critically affect performance. 
For example, a 1 µm error in a microlens height could shift focus by hundreds of micrometers.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Overall, the technical design of a lens array involves a trade-off between FOV, resolution, brightness, and physical thickness. Emerging approaches like metalens arrays promise thinner optics with engineered dispersion&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&amp;gt;&amp;lt;ref&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;/ref&lt;/del&gt;&amp;gt;, which may shift these trade-offs in future systems.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Overall, the technical design of a lens array involves a trade-off between FOV, resolution, brightness, and physical thickness. Emerging approaches like metalens arrays promise thinner optics with engineered dispersion&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Li2019&quot; &lt;/ins&gt;/&amp;gt;, which may shift these trade-offs in future systems.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== Challenges ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== Challenges ==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l63&quot;&gt;Line 63:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 65:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array components face several challenges in VR/AR:&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Lens-array components face several challenges in VR/AR:&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;FoV–Resolution trade-off:&#039;&#039;&#039; Expanding the user&#039;s field of view typically requires more lenslets or larger lens aperture, but this reduces the angular (and thus spatial) resolution per view. Shin &#039;&#039;et al.&#039;&#039; showed that using a polarization grating could enlarge an MLA display&#039;s FoV from ~59° to 95°, but this was at the expense of needing sophisticated polarization control.&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Shin G, Lee Y, Kim J, et al. Field of view and angular-resolution enhancement in microlens array type virtual reality near-eye display using polarization grating. PubMed. 2023;39876217.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt; In many designs, improving one parameter (like FoV or brightness) degrades another.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;FoV–Resolution trade-off:&#039;&#039;&#039; Expanding the user&#039;s field of view typically requires more lenslets or larger lens aperture, but this reduces the angular (and thus spatial) resolution per view. 
Shin &#039;&#039;et al.&#039;&#039; showed that using a polarization grating could enlarge an MLA display&#039;s FoV from ~59° to 95°, but this was at the expense of needing sophisticated polarization control.&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Shin2023&quot; &lt;/ins&gt;/&amp;gt; In many designs, improving one parameter (like FoV or brightness) degrades another.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Chromatic aberration and color mixing:&#039;&#039;&#039; As noted earlier, MLAs inherently blur different colors unless achromatized. Achieving full-color images through a simple lens array is difficult.&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Li X, Chen L, Li Y, et al. A broadband achromatic metalens array for integral imaging in the visible. Light Sci Appl. 2019;8:99.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt; Some systems use color filter arrays or sequential-color illumination to mitigate this, but this adds complexity and can reduce brightness.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Chromatic aberration and color mixing:&#039;&#039;&#039; As noted earlier, MLAs inherently blur different colors unless achromatized. Achieving full-color images through a simple lens array is difficult.&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Li2019&quot; &lt;/ins&gt;/&amp;gt; Some systems use color filter arrays or sequential-color illumination to mitigate this, but this adds complexity and can reduce brightness.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Crosstalk and ghosting:&amp;#039;&amp;#039;&amp;#039; In multi-view displays, the images for adjacent views must not overlap. Small misalignments or imperfections cause crosstalk, where one eye sees part of the image intended for the other. This degrades 3D effect. In holographic see-through designs, incomplete isolation can cause ghost images of virtual content. Accurate fabrication and calibration are needed to minimize these artifacts.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Crosstalk and ghosting:&amp;#039;&amp;#039;&amp;#039; In multi-view displays, the images for adjacent views must not overlap. Small misalignments or imperfections cause crosstalk, where one eye sees part of the image intended for the other. This degrades 3D effect. In holographic see-through designs, incomplete isolation can cause ghost images of virtual content. Accurate fabrication and calibration are needed to minimize these artifacts.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Eye-box and alignment:&#039;&#039;&#039; For near-eye applications, the exit pupil (eye-box) must accommodate user movement. Simple lens arrays can produce small, fixed eye-boxes. Techniques like eye-tracking (to move the image) or pupil duplication (multiple layered arrays) are required to ensure a reasonable viewing region. The scanning waveguide example noted that despite a wide FoV, the eye-box remained limited, and they attributed low resolution partly to the relatively large 2 mm lens pitch&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Wei K. Near-eye augmented reality display using wide field-of-view scanning polarization pupil replication. University of California, Berkeley. 2023.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt; (larger pitch reduced how finely the eye-box could be sampled).&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Eye-box and alignment:&#039;&#039;&#039; For near-eye applications, the exit pupil (eye-box) must accommodate user movement. Simple lens arrays can produce small, fixed eye-boxes. Techniques like eye-tracking (to move the image) or pupil duplication (multiple layered arrays) are required to ensure a reasonable viewing region. 
The scanning waveguide example noted that despite a wide FoV, the eye-box remained limited, and they attributed low resolution partly to the relatively large 2 mm lens pitch&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Wei2023&quot; &lt;/ins&gt;/&amp;gt; (larger pitch reduced how finely the eye-box could be sampled).&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Optical efficiency:&amp;#039;&amp;#039;&amp;#039; Each optical surface, grating, or holographic element can introduce loss. Adding an array of lenslets means more surfaces and potential Fresnel reflections. Diffractive elements (gratings, HOEs) often have limited efficiency bandwidth. Ensuring enough brightness in the final image is a common design hurdle, especially for battery-powered displays.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Optical efficiency:&amp;#039;&amp;#039;&amp;#039; Each optical surface, grating, or holographic element can introduce loss. Adding an array of lenslets means more surfaces and potential Fresnel reflections. Diffractive elements (gratings, HOEs) often have limited efficiency bandwidth. Ensuring enough brightness in the final image is a common design hurdle, especially for battery-powered displays.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l79&quot;&gt;Line 79:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 81:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Research on lens-array technology is advancing rapidly. &amp;#039;&amp;#039;&amp;#039;Adaptive optics&amp;#039;&amp;#039;&amp;#039; will likely play a growing role. Arrays of liquid-crystal or shape-changing lenses could allow dynamic focus control and multi-focal displays (reducing vergence-accommodation conflict). Similarly, &amp;#039;&amp;#039;&amp;#039;dynamic wavelength control&amp;#039;&amp;#039;&amp;#039; (e.g. polarization or tunable filters in each lenslet) could enable spatiotemporal multiplexing for color and focus.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;Research on lens-array technology is advancing rapidly. &amp;#039;&amp;#039;&amp;#039;Adaptive optics&amp;#039;&amp;#039;&amp;#039; will likely play a growing role. Arrays of liquid-crystal or shape-changing lenses could allow dynamic focus control and multi-focal displays (reducing vergence-accommodation conflict). Similarly, &amp;#039;&amp;#039;&amp;#039;dynamic wavelength control&amp;#039;&amp;#039;&amp;#039; (e.g. polarization or tunable filters in each lenslet) could enable spatiotemporal multiplexing for color and focus.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Metasurfaces and flat optics&#039;&#039;&#039; are a major trend. Recent work has demonstrated &#039;&#039;achromatic metasurface waveguides&#039;&#039; for AR: for example, a 2025 Light:Science &amp;amp;Apps paper introduced inverse-designed metasurface couplers that eliminate chromatic aberration across the full visible spectrum and achieve ~45° FOV.&amp;lt;ref&amp;gt;An achromatic metasurface waveguide for augmented reality displays. Light Sci Appl. 2025;41377-025-01761.&amp;lt;/ref&amp;gt; These metasurface lens arrays are ultrathin and could replace bulky refractive MLAs in future headsets. Cholesteric liquid-crystal metasurface (chiral) lens arrays have already been used to break the field-of-view limit in a scanning AR display.&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Wei K. Near-eye augmented reality display using wide field-of-view scanning polarization pupil replication. University of California, Berkeley. 2023.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&#039;&#039;&#039;Metasurfaces and flat optics&#039;&#039;&#039; are a major trend. 
Recent work has demonstrated &#039;&#039;achromatic metasurface waveguides&#039;&#039; for AR: for example, a 2025 Light:Science &amp;amp;Apps paper introduced inverse-designed metasurface couplers that eliminate chromatic aberration across the full visible spectrum and achieve ~45° FOV.&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Achromatic2025&quot;&lt;/ins&gt;&amp;gt;An achromatic metasurface waveguide for augmented reality displays. Light Sci Appl. 2025;41377-025-01761.&amp;lt;/ref&amp;gt; These metasurface lens arrays are ultrathin and could replace bulky refractive MLAs in future headsets. Cholesteric liquid-crystal metasurface (chiral) lens arrays have already been used to break the field-of-view limit in a scanning AR display.&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Wei2023&quot; &lt;/ins&gt;/&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Integration and compute-optics co-design&amp;#039;&amp;#039;&amp;#039; will improve performance. Headsets may co-optimize lens arrays with on-sensor processing. For instance, a microlens array camera could perform onboard refocusing or eye-pose estimation in hardware. Conversely, display side, algorithms could pre-distort images to compensate residual lens aberrations.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Integration and compute-optics co-design&amp;#039;&amp;#039;&amp;#039; will improve performance. Headsets may co-optimize lens arrays with on-sensor processing. For instance, a microlens array camera could perform onboard refocusing or eye-pose estimation in hardware. Conversely, display side, algorithms could pre-distort images to compensate residual lens aberrations.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot; id=&quot;mw-diff-left-l85&quot;&gt;Line 85:&lt;/td&gt;
&lt;td colspan=&quot;2&quot; class=&quot;diff-lineno&quot;&gt;Line 87:&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Higher-density arrays&amp;#039;&amp;#039;&amp;#039; and &amp;#039;&amp;#039;&amp;#039;monolithic fabrication&amp;#039;&amp;#039;&amp;#039; may emerge. Advances in 3D printing and nanoimprint lithography could yield integrated &amp;quot;optical wafers&amp;quot; combining display and MLA. Also, developments in holographic printing may allow recording complex lens-array HOEs on demand.&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;&amp;#039;&amp;#039;&amp;#039;Higher-density arrays&amp;#039;&amp;#039;&amp;#039; and &amp;#039;&amp;#039;&amp;#039;monolithic fabrication&amp;#039;&amp;#039;&amp;#039; may emerge. Advances in 3D printing and nanoimprint lithography could yield integrated &amp;quot;optical wafers&amp;quot; combining display and MLA. Also, developments in holographic printing may allow recording complex lens-array HOEs on demand.&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;In sensing, &#039;&#039;light-field cameras&#039;&#039; in miniaturized form will likely become standard in AR glasses for robust gaze and hand tracking, thanks to the flexibility demonstrated in patents and prototypes.&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Yang L, Guo Y. Eye tracking using a light field camera on a head-mounted display. US Patent Application US20180173303A1. 2018 Jun 21.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt;&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Microsoft. Camera comprising lens array. Patent Nweon. 2020;30768.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;In sensing, &#039;&#039;light-field cameras&#039;&#039; in miniaturized form will likely become standard in AR glasses for robust gaze and hand tracking, thanks to the flexibility demonstrated in patents and prototypes.&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Yang2018&quot; &lt;/ins&gt;/&amp;gt;&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Microsoft2020&quot; &lt;/ins&gt;/&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;−&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #ffe49c; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;As VR/AR systems aim for wider FOV, thinner form factors, and better realism, custom lens-array designs will continue to evolve. Each new generation of headsets (for example, employing pancake optics, multi-zone optics, or holographic waveguides) tends to reinvigorate lens-array innovation. In sum, lens arrays remain a key enabling technology for immersive displays and interactive sensing, with ongoing research focusing on mitigating their limitations and leveraging novel materials and computation.&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;An achromatic metasurface waveguide for augmented reality displays. Light Sci Appl. 2025;41377-025-01761.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt;&amp;lt;ref&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;&amp;gt;Wei K. Near-eye augmented reality display using wide field-of-view scanning polarization pupil replication. University of California, Berkeley. 2023.&amp;lt;&lt;/del&gt;/&lt;del style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;ref&lt;/del&gt;&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot; data-marker=&quot;+&quot;&gt;&lt;/td&gt;&lt;td style=&quot;color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #a3d3ff; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;As VR/AR systems aim for wider FOV, thinner form factors, and better realism, custom lens-array designs will continue to evolve. Each new generation of headsets (for example, employing pancake optics, multi-zone optics, or holographic waveguides) tends to reinvigorate lens-array innovation. In sum, lens arrays remain a key enabling technology for immersive displays and interactive sensing, with ongoing research focusing on mitigating their limitations and leveraging novel materials and computation.&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Achromatic2025&quot; &lt;/ins&gt;/&amp;gt;&amp;lt;ref &lt;ins style=&quot;font-weight: bold; text-decoration: none;&quot;&gt;name=&quot;Wei2023&quot; &lt;/ins&gt;/&amp;gt;&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;br&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;tr&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== References ==&lt;/div&gt;&lt;/td&gt;&lt;td class=&quot;diff-marker&quot;&gt;&lt;/td&gt;&lt;td style=&quot;background-color: #f8f9fa; color: #202122; font-size: 88%; border-style: solid; border-width: 1px 1px 1px 4px; border-radius: 0.33em; border-color: #eaecf0; vertical-align: top; white-space: pre-wrap;&quot;&gt;&lt;div&gt;== References ==&lt;/div&gt;&lt;/td&gt;&lt;/tr&gt;
&lt;/table&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34712&amp;oldid=prev</id>
		<title>Xinreality at 02:23, 30 April 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34712&amp;oldid=prev"/>
		<updated>2025-04-30T02:23:10Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;a href=&quot;https://vrarwiki.com/index.php?title=Lens_array&amp;amp;diff=34712&amp;amp;oldid=34711&quot;&gt;Show changes&lt;/a&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
	<entry>
		<id>https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34711&amp;oldid=prev</id>
		<title>Xinreality at 02:16, 30 April 2025</title>
		<link rel="alternate" type="text/html" href="https://vrarwiki.com/index.php?title=Lens_array&amp;diff=34711&amp;oldid=prev"/>
		<updated>2025-04-30T02:16:43Z</updated>

		<summary type="html">&lt;p&gt;&lt;/p&gt;
&lt;a href=&quot;https://vrarwiki.com/index.php?title=Lens_array&amp;amp;diff=34711&amp;amp;oldid=34710&quot;&gt;Show changes&lt;/a&gt;</summary>
		<author><name>Xinreality</name></author>
	</entry>
</feed>